1169689Skan/* FIXME: We need to go back and add the warning messages about code 2169689Skan moved across setjmp. */ 3169689Skan 4169689Skan 5169689Skan/* Scanning of rtl for dataflow analysis. 6169689Skan Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 7169689Skan Free Software Foundation, Inc. 8169689Skan Originally contributed by Michael P. Hayes 9169689Skan (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com) 10169689Skan Major rewrite contributed by Danny Berlin (dberlin@dberlin.org) 11169689Skan and Kenneth Zadeck (zadeck@naturalbridge.com). 12169689Skan 13169689SkanThis file is part of GCC. 14169689Skan 15169689SkanGCC is free software; you can redistribute it and/or modify it under 16169689Skanthe terms of the GNU General Public License as published by the Free 17169689SkanSoftware Foundation; either version 2, or (at your option) any later 18169689Skanversion. 19169689Skan 20169689SkanGCC is distributed in the hope that it will be useful, but WITHOUT ANY 21169689SkanWARRANTY; without even the implied warranty of MERCHANTABILITY or 22169689SkanFITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License 23169689Skanfor more details. 24169689Skan 25169689SkanYou should have received a copy of the GNU General Public License 26169689Skanalong with GCC; see the file COPYING. If not, write to the Free 27169689SkanSoftware Foundation, 51 Franklin Street, Fifth Floor, Boston, MA 28169689Skan02110-1301, USA. 
29169689Skan*/ 30169689Skan 31169689Skan#include "config.h" 32169689Skan#include "system.h" 33169689Skan#include "coretypes.h" 34169689Skan#include "tm.h" 35169689Skan#include "rtl.h" 36169689Skan#include "tm_p.h" 37169689Skan#include "insn-config.h" 38169689Skan#include "recog.h" 39169689Skan#include "function.h" 40169689Skan#include "regs.h" 41169689Skan#include "output.h" 42169689Skan#include "alloc-pool.h" 43169689Skan#include "flags.h" 44169689Skan#include "hard-reg-set.h" 45169689Skan#include "basic-block.h" 46169689Skan#include "sbitmap.h" 47169689Skan#include "bitmap.h" 48169689Skan#include "timevar.h" 49169689Skan#include "tree.h" 50169689Skan#include "target.h" 51169689Skan#include "target-def.h" 52169689Skan#include "df.h" 53169689Skan 54169689Skan#ifndef HAVE_epilogue 55169689Skan#define HAVE_epilogue 0 56169689Skan#endif 57169689Skan#ifndef HAVE_prologue 58169689Skan#define HAVE_prologue 0 59169689Skan#endif 60169689Skan#ifndef HAVE_sibcall_epilogue 61169689Skan#define HAVE_sibcall_epilogue 0 62169689Skan#endif 63169689Skan 64169689Skan#ifndef EPILOGUE_USES 65169689Skan#define EPILOGUE_USES(REGNO) 0 66169689Skan#endif 67169689Skan 68169689Skan/* The bitmap_obstack is used to hold some static variables that 69169689Skan should not be reset after each function is compiled. */ 70169689Skan 71169689Skanstatic bitmap_obstack persistent_obstack; 72169689Skan 73169689Skan/* The set of hard registers in eliminables[i].from. */ 74169689Skan 75169689Skanstatic HARD_REG_SET elim_reg_set; 76169689Skan 77169689Skan/* This is a bitmap copy of regs_invalidated_by_call so that we can 78169689Skan easily add it into bitmaps, etc. */ 79169689Skan 80169689Skanbitmap df_invalidated_by_call = NULL; 81169689Skan 82169689Skan/* Initialize ur_in and ur_out as if all hard registers were partially 83169689Skan available. 
*/ 84169689Skan 85169689Skanstatic void df_ref_record (struct dataflow *, rtx, rtx *, 86169689Skan basic_block, rtx, enum df_ref_type, 87169689Skan enum df_ref_flags, bool record_live); 88169689Skanstatic void df_def_record_1 (struct dataflow *, rtx, basic_block, rtx, 89169689Skan enum df_ref_flags, bool record_live); 90169689Skanstatic void df_defs_record (struct dataflow *, rtx, basic_block, rtx); 91169689Skanstatic void df_uses_record (struct dataflow *, rtx *, enum df_ref_type, 92169689Skan basic_block, rtx, enum df_ref_flags); 93169689Skan 94169689Skanstatic void df_insn_refs_record (struct dataflow *, basic_block, rtx); 95169689Skanstatic void df_bb_refs_record (struct dataflow *, basic_block); 96169689Skanstatic void df_refs_record (struct dataflow *, bitmap); 97169689Skanstatic struct df_ref *df_ref_create_structure (struct dataflow *, rtx, rtx *, 98169689Skan basic_block, rtx, enum df_ref_type, 99169689Skan enum df_ref_flags); 100169689Skanstatic void df_record_entry_block_defs (struct dataflow *); 101169689Skanstatic void df_record_exit_block_uses (struct dataflow *); 102169689Skanstatic void df_grow_reg_info (struct dataflow *, struct df_ref_info *); 103169689Skanstatic void df_grow_ref_info (struct df_ref_info *, unsigned int); 104169689Skanstatic void df_grow_insn_info (struct df *); 105169689Skan 106169689Skan 107169689Skan/*---------------------------------------------------------------------------- 108169689Skan SCANNING DATAFLOW PROBLEM 109169689Skan 110169689Skan There are several ways in which scanning looks just like the other 111169689Skan dataflow problems. It shares the all the mechanisms for local info 112169689Skan as well as basic block info. Where it differs is when and how often 113169689Skan it gets run. It also has no need for the iterative solver. 114169689Skan----------------------------------------------------------------------------*/ 115169689Skan 116169689Skan/* Problem data for the scanning dataflow function. 
*/
/* Problem data for the scanning dataflow problem: the allocation
   pools that all scan-created objects are carved from.  Freeing the
   pools releases every ref/insn/reg record at once.  */
struct df_scan_problem_data
{
  alloc_pool ref_pool;      /* Pool of struct df_ref.  */
  alloc_pool insn_pool;     /* Pool of struct df_insn_info.  */
  alloc_pool reg_pool;      /* Pool of struct df_reg_info.  */
  alloc_pool mw_reg_pool;   /* Pool of struct df_mw_hardreg.  */
  alloc_pool mw_link_pool;  /* Pool of struct df_link (multiword chains).  */
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;

/* Release every piece of storage owned by the scanning problem in
   DFLOW: the def/use ref tables, the per-insn table, the per-block
   table, the entry/exit/hard-reg bitmaps, and finally the pools
   themselves.  Note that individual refs are NOT freed one by one;
   destroying the pools reclaims them wholesale.  */
static void
df_scan_free_internal (struct dataflow *dflow)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  /* The regs/refs arrays are plain xrealloc'd vectors; the structs
     they point at live in the pools freed below.  */
  free (df->def_info.regs);
  free (df->def_info.refs);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.regs);
  free (df->use_info.refs);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->insns);
  df->insns = NULL;
  df->insns_size = 0;

  free (dflow->block_info);
  dflow->block_info = NULL;
  dflow->block_info_size = 0;

  BITMAP_FREE (df->hardware_regs_used);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);

  /* Freeing the pools frees every object allocated from them.  */
  free_alloc_pool (dflow->block_pool);
  free_alloc_pool (problem_data->ref_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  free_alloc_pool (problem_data->mw_link_pool);
}


/* Get basic block info.
*/ 165169689Skan 166169689Skanstruct df_scan_bb_info * 167169689Skandf_scan_get_bb_info (struct dataflow *dflow, unsigned int index) 168169689Skan{ 169169689Skan gcc_assert (index < dflow->block_info_size); 170169689Skan return (struct df_scan_bb_info *) dflow->block_info[index]; 171169689Skan} 172169689Skan 173169689Skan 174169689Skan/* Set basic block info. */ 175169689Skan 176169689Skanstatic void 177169689Skandf_scan_set_bb_info (struct dataflow *dflow, unsigned int index, 178169689Skan struct df_scan_bb_info *bb_info) 179169689Skan{ 180169689Skan gcc_assert (index < dflow->block_info_size); 181169689Skan dflow->block_info[index] = (void *) bb_info; 182169689Skan} 183169689Skan 184169689Skan 185169689Skan/* Free basic block info. */ 186169689Skan 187169689Skanstatic void 188169689Skandf_scan_free_bb_info (struct dataflow *dflow, basic_block bb, void *vbb_info) 189169689Skan{ 190169689Skan struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info; 191169689Skan if (bb_info) 192169689Skan { 193169689Skan df_bb_refs_delete (dflow, bb->index); 194169689Skan pool_free (dflow->block_pool, bb_info); 195169689Skan } 196169689Skan} 197169689Skan 198169689Skan 199169689Skan/* Allocate the problem data for the scanning problem. This should be 200169689Skan called when the problem is created or when the entire function is to 201169689Skan be rescanned. */ 202169689Skan 203169689Skanstatic void 204169689Skandf_scan_alloc (struct dataflow *dflow, bitmap blocks_to_rescan, 205169689Skan bitmap all_blocks ATTRIBUTE_UNUSED) 206169689Skan{ 207169689Skan struct df *df = dflow->df; 208169689Skan struct df_scan_problem_data *problem_data; 209169689Skan unsigned int insn_num = get_max_uid () + 1; 210169689Skan unsigned int block_size = 50; 211169689Skan unsigned int bb_index; 212169689Skan bitmap_iterator bi; 213169689Skan 214169689Skan /* Given the number of pools, this is really faster than tearing 215169689Skan everything apart. 
*/ 216169689Skan if (dflow->problem_data) 217169689Skan df_scan_free_internal (dflow); 218169689Skan 219169689Skan dflow->block_pool 220169689Skan = create_alloc_pool ("df_scan_block pool", 221169689Skan sizeof (struct df_scan_bb_info), 222169689Skan block_size); 223169689Skan 224169689Skan problem_data = XNEW (struct df_scan_problem_data); 225169689Skan dflow->problem_data = problem_data; 226169689Skan 227169689Skan problem_data->ref_pool 228169689Skan = create_alloc_pool ("df_scan_ref pool", 229169689Skan sizeof (struct df_ref), block_size); 230169689Skan problem_data->insn_pool 231169689Skan = create_alloc_pool ("df_scan_insn pool", 232169689Skan sizeof (struct df_insn_info), block_size); 233169689Skan problem_data->reg_pool 234169689Skan = create_alloc_pool ("df_scan_reg pool", 235169689Skan sizeof (struct df_reg_info), block_size); 236169689Skan problem_data->mw_reg_pool 237169689Skan = create_alloc_pool ("df_scan_mw_reg pool", 238169689Skan sizeof (struct df_mw_hardreg), block_size); 239169689Skan problem_data->mw_link_pool 240169689Skan = create_alloc_pool ("df_scan_mw_link pool", 241169689Skan sizeof (struct df_link), block_size); 242169689Skan 243169689Skan insn_num += insn_num / 4; 244169689Skan df_grow_reg_info (dflow, &df->def_info); 245169689Skan df_grow_ref_info (&df->def_info, insn_num); 246169689Skan 247169689Skan df_grow_reg_info (dflow, &df->use_info); 248169689Skan df_grow_ref_info (&df->use_info, insn_num *2); 249169689Skan 250169689Skan df_grow_insn_info (df); 251169689Skan df_grow_bb_info (dflow); 252169689Skan 253169689Skan EXECUTE_IF_SET_IN_BITMAP (blocks_to_rescan, 0, bb_index, bi) 254169689Skan { 255169689Skan struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb_index); 256169689Skan if (!bb_info) 257169689Skan { 258169689Skan bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool); 259169689Skan df_scan_set_bb_info (dflow, bb_index, bb_info); 260169689Skan } 261169689Skan bb_info->artificial_defs = NULL; 
262169689Skan bb_info->artificial_uses = NULL; 263169689Skan } 264169689Skan 265169689Skan df->hardware_regs_used = BITMAP_ALLOC (NULL); 266169689Skan df->entry_block_defs = BITMAP_ALLOC (NULL); 267169689Skan df->exit_block_uses = BITMAP_ALLOC (NULL); 268169689Skan} 269169689Skan 270169689Skan 271169689Skan/* Free all of the data associated with the scan problem. */ 272169689Skan 273169689Skanstatic void 274169689Skandf_scan_free (struct dataflow *dflow) 275169689Skan{ 276169689Skan struct df *df = dflow->df; 277169689Skan 278169689Skan if (dflow->problem_data) 279169689Skan { 280169689Skan df_scan_free_internal (dflow); 281169689Skan free (dflow->problem_data); 282169689Skan } 283169689Skan 284169689Skan if (df->blocks_to_scan) 285169689Skan BITMAP_FREE (df->blocks_to_scan); 286169689Skan 287169689Skan if (df->blocks_to_analyze) 288169689Skan BITMAP_FREE (df->blocks_to_analyze); 289169689Skan 290169689Skan free (dflow); 291169689Skan} 292169689Skan 293169689Skanstatic void 294169689Skandf_scan_dump (struct dataflow *dflow ATTRIBUTE_UNUSED, FILE *file ATTRIBUTE_UNUSED) 295169689Skan{ 296169689Skan struct df *df = dflow->df; 297169689Skan int i; 298169689Skan 299169689Skan fprintf (file, " invalidated by call \t"); 300169689Skan dump_bitmap (file, df_invalidated_by_call); 301169689Skan fprintf (file, " hardware regs used \t"); 302169689Skan dump_bitmap (file, df->hardware_regs_used); 303169689Skan fprintf (file, " entry block uses \t"); 304169689Skan dump_bitmap (file, df->entry_block_defs); 305169689Skan fprintf (file, " exit block uses \t"); 306169689Skan dump_bitmap (file, df->exit_block_uses); 307169689Skan fprintf (file, " regs ever live \t"); 308169689Skan for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 309169689Skan if (regs_ever_live[i]) 310169689Skan fprintf (file, "%d ", i); 311169689Skan fprintf (file, "\n"); 312169689Skan} 313169689Skan 314169689Skanstatic struct df_problem problem_SCAN = 315169689Skan{ 316169689Skan DF_SCAN, /* Problem id. 
*/ 317169689Skan DF_NONE, /* Direction. */ 318169689Skan df_scan_alloc, /* Allocate the problem specific data. */ 319169689Skan NULL, /* Reset global information. */ 320169689Skan df_scan_free_bb_info, /* Free basic block info. */ 321169689Skan NULL, /* Local compute function. */ 322169689Skan NULL, /* Init the solution specific data. */ 323169689Skan NULL, /* Iterative solver. */ 324169689Skan NULL, /* Confluence operator 0. */ 325169689Skan NULL, /* Confluence operator n. */ 326169689Skan NULL, /* Transfer function. */ 327169689Skan NULL, /* Finalize function. */ 328169689Skan df_scan_free, /* Free all of the problem information. */ 329169689Skan df_scan_dump, /* Debugging. */ 330169689Skan NULL, /* Dependent problem. */ 331169689Skan 0 /* Changeable flags. */ 332169689Skan}; 333169689Skan 334169689Skan 335169689Skan/* Create a new DATAFLOW instance and add it to an existing instance 336169689Skan of DF. The returned structure is what is used to get at the 337169689Skan solution. */ 338169689Skan 339169689Skanstruct dataflow * 340169689Skandf_scan_add_problem (struct df *df, int flags) 341169689Skan{ 342169689Skan return df_add_problem (df, &problem_SCAN, flags); 343169689Skan} 344169689Skan 345169689Skan/*---------------------------------------------------------------------------- 346169689Skan Storage Allocation Utilities 347169689Skan----------------------------------------------------------------------------*/ 348169689Skan 349169689Skan 350169689Skan/* First, grow the reg_info information. If the current size is less than 351169689Skan the number of psuedos, grow to 25% more than the number of 352169689Skan pseudos. 353169689Skan 354169689Skan Second, assure that all of the slots up to max_reg_num have been 355169689Skan filled with reg_info structures. 
*/ 356169689Skan 357169689Skanstatic void 358169689Skandf_grow_reg_info (struct dataflow *dflow, struct df_ref_info *ref_info) 359169689Skan{ 360169689Skan unsigned int max_reg = max_reg_num (); 361169689Skan unsigned int new_size = max_reg; 362169689Skan struct df_scan_problem_data *problem_data 363169689Skan = (struct df_scan_problem_data *) dflow->problem_data; 364169689Skan unsigned int i; 365169689Skan 366169689Skan if (ref_info->regs_size < new_size) 367169689Skan { 368169689Skan new_size += new_size / 4; 369169689Skan ref_info->regs = xrealloc (ref_info->regs, 370169689Skan new_size *sizeof (struct df_reg_info*)); 371169689Skan ref_info->regs_size = new_size; 372169689Skan } 373169689Skan 374169689Skan for (i = ref_info->regs_inited; i < max_reg; i++) 375169689Skan { 376169689Skan struct df_reg_info *reg_info = pool_alloc (problem_data->reg_pool); 377169689Skan memset (reg_info, 0, sizeof (struct df_reg_info)); 378169689Skan ref_info->regs[i] = reg_info; 379169689Skan } 380169689Skan 381169689Skan ref_info->regs_inited = max_reg; 382169689Skan} 383169689Skan 384169689Skan 385169689Skan/* Grow the ref information. */ 386169689Skan 387169689Skanstatic void 388169689Skandf_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size) 389169689Skan{ 390169689Skan if (ref_info->refs_size < new_size) 391169689Skan { 392169689Skan ref_info->refs = xrealloc (ref_info->refs, 393169689Skan new_size *sizeof (struct df_ref *)); 394169689Skan memset (ref_info->refs + ref_info->refs_size, 0, 395169689Skan (new_size - ref_info->refs_size) *sizeof (struct df_ref *)); 396169689Skan ref_info->refs_size = new_size; 397169689Skan } 398169689Skan} 399169689Skan 400169689Skan 401169689Skan/* Grow the ref information. If the current size is less than the 402169689Skan number of instructions, grow to 25% more than the number of 403169689Skan instructions. 
*/

static void
df_grow_insn_info (struct df *df)
{
  unsigned int new_size = get_max_uid () + 1;
  if (df->insns_size < new_size)
    {
      /* Grow to 25% past the current uid count; zero the new tail so
	 unseen insns read as "no info".  */
      new_size += new_size / 4;
      df->insns = xrealloc (df->insns,
			    new_size *sizeof (struct df_insn_info *));
      memset (df->insns + df->insns_size, 0,
	      (new_size - df->insns_size) *sizeof (struct df_insn_info *));
      df->insns_size = new_size;
    }
}




/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan some BLOCKS or all the blocks defined by the last call to
   df_set_blocks if BLOCKS is NULL);  */

void
df_rescan_blocks (struct df *df, bitmap blocks)
{
  /* Working copy of the block set; BLOCKS itself may be pruned of
     dead block indices below.  */
  bitmap local_blocks_to_scan = BITMAP_ALLOC (NULL);

  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  basic_block bb;

  /* Ref ids are about to be invalidated.  */
  df->def_info.refs_organized = false;
  df->use_info.refs_organized = false;

  if (blocks)
    {
      int i;
      unsigned int bb_index;
      bitmap_iterator bi;
      bool cleared_bits = false;

      /* Need to assure that there are space in all of the tables.  */
      unsigned int insn_num = get_max_uid () + 1;
      insn_num += insn_num / 4;

      df_grow_reg_info (dflow, &df->def_info);
      df_grow_ref_info (&df->def_info, insn_num);

      df_grow_reg_info (dflow, &df->use_info);
      df_grow_ref_info (&df->use_info, insn_num *2);

      df_grow_insn_info (df);
      df_grow_bb_info (dflow);

      bitmap_copy (local_blocks_to_scan, blocks);

      /* Drop indices whose basic block no longer exists.  */
      EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
	{
	  basic_block bb = BASIC_BLOCK (bb_index);
	  if (!bb)
	    {
	      bitmap_clear_bit (local_blocks_to_scan, bb_index);
	      cleared_bits = true;
	    }
	}

      /* Reflect the pruning back into the caller's bitmap.  */
      if (cleared_bits)
	bitmap_copy (blocks, local_blocks_to_scan);

      df->def_info.add_refs_inline = true;
      df->use_info.add_refs_inline = true;

      /* NOTE(review): this loop runs num_problems_defined times but
	 always inspects DFLOW (the scan problem) rather than each
	 problem in turn — it looks like it was meant to iterate over
	 df->problems_in_order[i-1]; confirm before relying on reset_fun
	 being called for dependent problems.  */
      for (i = df->num_problems_defined; i; i--)
	{
	  bitmap blocks_to_reset = NULL;
	  if (dflow->problem->reset_fun)
	    {
	      if (!blocks_to_reset)
		{
		  blocks_to_reset = BITMAP_ALLOC (NULL);
		  bitmap_copy (blocks_to_reset, local_blocks_to_scan);
		  if (df->blocks_to_scan)
		    bitmap_ior_into (blocks_to_reset, df->blocks_to_scan);
		}
	      dflow->problem->reset_fun (dflow, blocks_to_reset);
	    }
	  if (blocks_to_reset)
	    BITMAP_FREE (blocks_to_reset);
	}

      df_refs_delete (dflow, local_blocks_to_scan);

      /* This may be a mistake, but if an explicit blocks is passed in
	 and the set of blocks to analyze has been explicitly set, add
	 the extra blocks to blocks_to_analyze.  The alternative is to
	 put an assert here.  We do not want this to just go by
	 silently or else we may get storage leaks.  */
      if (df->blocks_to_analyze)
	bitmap_ior_into (df->blocks_to_analyze, blocks);
    }
  else
    {
      /* If we are going to do everything, just reallocate everything.
	 Most stuff is allocated in pools so this is faster than
	 walking it.  */
      if (df->blocks_to_analyze)
	bitmap_copy (local_blocks_to_scan, df->blocks_to_analyze);
      else
	FOR_ALL_BB (bb)
	  {
	    bitmap_set_bit (local_blocks_to_scan, bb->index);
	  }
      df_scan_alloc (dflow, local_blocks_to_scan, NULL);

      df->def_info.add_refs_inline = false;
      df->use_info.add_refs_inline = false;
    }

  df_refs_record (dflow, local_blocks_to_scan);
#if 0
  bitmap_print (stderr, local_blocks_to_scan, "scanning: ", "\n");
#endif

  /* Accumulate the set of blocks ever scanned.  */
  if (!df->blocks_to_scan)
    df->blocks_to_scan = BITMAP_ALLOC (NULL);

  bitmap_ior_into (df->blocks_to_scan, local_blocks_to_scan);
  BITMAP_FREE (local_blocks_to_scan);
}


/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.
*/ 540169689Skan 541169689Skanstruct df_ref * 542169689Skandf_ref_create (struct df *df, rtx reg, rtx *loc, rtx insn, 543169689Skan basic_block bb, 544169689Skan enum df_ref_type ref_type, 545169689Skan enum df_ref_flags ref_flags) 546169689Skan{ 547169689Skan struct dataflow *dflow = df->problems_by_index[DF_SCAN]; 548169689Skan struct df_scan_bb_info *bb_info; 549169689Skan 550169689Skan df_grow_reg_info (dflow, &df->use_info); 551169689Skan df_grow_reg_info (dflow, &df->def_info); 552169689Skan df_grow_bb_info (dflow); 553169689Skan 554169689Skan /* Make sure there is the bb_info for this block. */ 555169689Skan bb_info = df_scan_get_bb_info (dflow, bb->index); 556169689Skan if (!bb_info) 557169689Skan { 558169689Skan bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool); 559169689Skan df_scan_set_bb_info (dflow, bb->index, bb_info); 560169689Skan bb_info->artificial_defs = NULL; 561169689Skan bb_info->artificial_uses = NULL; 562169689Skan } 563169689Skan 564169689Skan if (ref_type == DF_REF_REG_DEF) 565169689Skan df->def_info.add_refs_inline = true; 566169689Skan else 567169689Skan df->use_info.add_refs_inline = true; 568169689Skan 569169689Skan return df_ref_create_structure (dflow, reg, loc, bb, insn, ref_type, ref_flags); 570169689Skan} 571169689Skan 572169689Skan 573169689Skan 574169689Skan/*---------------------------------------------------------------------------- 575169689Skan UTILITIES TO CREATE AND DESTROY REFS AND CHAINS. 576169689Skan----------------------------------------------------------------------------*/ 577169689Skan 578169689Skan 579169689Skan/* Get the artificial uses for a basic block. 
*/ 580169689Skan 581169689Skanstruct df_ref * 582169689Skandf_get_artificial_defs (struct df *df, unsigned int bb_index) 583169689Skan{ 584169689Skan struct dataflow *dflow = df->problems_by_index[DF_SCAN]; 585169689Skan return df_scan_get_bb_info (dflow, bb_index)->artificial_defs; 586169689Skan} 587169689Skan 588169689Skan 589169689Skan/* Get the artificial uses for a basic block. */ 590169689Skan 591169689Skanstruct df_ref * 592169689Skandf_get_artificial_uses (struct df *df, unsigned int bb_index) 593169689Skan{ 594169689Skan struct dataflow *dflow = df->problems_by_index[DF_SCAN]; 595169689Skan return df_scan_get_bb_info (dflow, bb_index)->artificial_uses; 596169689Skan} 597169689Skan 598169689Skan 599169689Skan/* Link REF at the front of reg_use or reg_def chain for REGNO. */ 600169689Skan 601169689Skanvoid 602169689Skandf_reg_chain_create (struct df_reg_info *reg_info, 603169689Skan struct df_ref *ref) 604169689Skan{ 605169689Skan struct df_ref *head = reg_info->reg_chain; 606169689Skan reg_info->reg_chain = ref; 607169689Skan 608169689Skan DF_REF_NEXT_REG (ref) = head; 609169689Skan 610169689Skan /* We cannot actually link to the head of the chain. */ 611169689Skan DF_REF_PREV_REG (ref) = NULL; 612169689Skan 613169689Skan if (head) 614169689Skan DF_REF_PREV_REG (head) = ref; 615169689Skan} 616169689Skan 617169689Skan 618169689Skan/* Remove REF from the CHAIN. Return the head of the chain. This 619169689Skan will be CHAIN unless the REF was at the beginning of the chain. 
*/ 620169689Skan 621169689Skanstatic struct df_ref * 622169689Skandf_ref_unlink (struct df_ref *chain, struct df_ref *ref) 623169689Skan{ 624169689Skan struct df_ref *orig_chain = chain; 625169689Skan struct df_ref *prev = NULL; 626169689Skan while (chain) 627169689Skan { 628169689Skan if (chain == ref) 629169689Skan { 630169689Skan if (prev) 631169689Skan { 632169689Skan prev->next_ref = ref->next_ref; 633169689Skan ref->next_ref = NULL; 634169689Skan return orig_chain; 635169689Skan } 636169689Skan else 637169689Skan { 638169689Skan chain = ref->next_ref; 639169689Skan ref->next_ref = NULL; 640169689Skan return chain; 641169689Skan } 642169689Skan } 643169689Skan 644169689Skan prev = chain; 645169689Skan chain = chain->next_ref; 646169689Skan } 647169689Skan 648169689Skan /* Someone passed in a ref that was not in the chain. */ 649169689Skan gcc_unreachable (); 650169689Skan return NULL; 651169689Skan} 652169689Skan 653169689Skan 654169689Skan/* Unlink and delete REF at the reg_use or reg_def chain. Also delete 655169689Skan the def-use or use-def chain if it exists. Returns the next ref in 656169689Skan uses or defs chain. 
*/

struct df_ref *
df_reg_chain_unlink (struct dataflow *dflow, struct df_ref *ref)
{
  struct df *df = dflow->df;
  struct df_ref *next = DF_REF_NEXT_REG (ref);
  struct df_ref *prev = DF_REF_PREV_REG (ref);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;
  struct df_reg_info *reg_info;
  struct df_ref *next_ref = ref->next_ref;
  unsigned int id = DF_REF_ID (ref);

  /* Clear REF's slot in the defs or uses table, guarding against ids
     from before the table was last grown.  */
  if (DF_REF_TYPE (ref) == DF_REF_REG_DEF)
    {
      reg_info = DF_REG_DEF_GET (df, DF_REF_REGNO (ref));
      df->def_info.bitmap_size--;
      if (df->def_info.refs && (id < df->def_info.refs_size))
	DF_DEFS_SET (df, id, NULL);
    }
  else
    {
      reg_info = DF_REG_USE_GET (df, DF_REF_REGNO (ref));
      df->use_info.bitmap_size--;
      if (df->use_info.refs && (id < df->use_info.refs_size))
	DF_USES_SET (df, id, NULL);
    }

  /* Delete any def-use or use-def chains that start here.  */
  if (DF_REF_CHAIN (ref))
    df_chain_unlink (df->problems_by_index[DF_CHAIN], ref, NULL);

  reg_info->n_refs--;

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    {
      DF_REF_NEXT_REG (prev) = next;
      if (next)
	DF_REF_PREV_REG (next) = prev;
    }
  else
    {
      reg_info->reg_chain = next;
      if (next)
	DF_REF_PREV_REG (next) = NULL;
    }

  pool_free (problem_data->ref_pool, ref);
  return next_ref;
}


/* Unlink REF from all def-use/use-def chains, etc.  Removes it from
   the per-insn (or per-block artificial) list it lives on, clears its
   table slot when inline additions are active, then fully deletes it
   via df_reg_chain_unlink.  */

void
df_ref_remove (struct df *df, struct df_ref *ref)
{
  struct dataflow *dflow = df->problems_by_index[DF_SCAN];
  if (DF_REF_REG_DEF_P (ref))
    {
      /* Artificial refs hang off the block; real refs off the insn.  */
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
	  bb_info->artificial_defs
	    = df_ref_unlink (bb_info->artificial_defs, ref);
	}
      else
	DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref))
	  = df_ref_unlink (DF_INSN_UID_DEFS (df, DF_REF_INSN_UID (ref)), ref);

      if (df->def_info.add_refs_inline)
	DF_DEFS_SET (df, DF_REF_ID (ref), NULL);
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_ARTIFICIAL)
	{
	  struct df_scan_bb_info *bb_info
	    = df_scan_get_bb_info (dflow, DF_REF_BB (ref)->index);
	  bb_info->artificial_uses
	    = df_ref_unlink (bb_info->artificial_uses, ref);
	}
      else
	DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref))
	  = df_ref_unlink (DF_INSN_UID_USES (df, DF_REF_INSN_UID (ref)), ref);

      if (df->use_info.add_refs_inline)
	DF_USES_SET (df, DF_REF_ID (ref), NULL);
    }

  df_reg_chain_unlink (dflow, ref);
}


/* Create the insn record for INSN.  If there was one there, zero it out.  */

static struct df_insn_info *
df_insn_create_insn_record (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  struct df_insn_info *insn_rec = DF_INSN_GET (df, insn);
  if (!insn_rec)
    {
      insn_rec = pool_alloc (problem_data->insn_pool);
      DF_INSN_SET (df, insn, insn_rec);
    }
  /* Reused records are zeroed too, so stale defs/uses never leak.  */
  memset (insn_rec, 0, sizeof (struct df_insn_info));

  return insn_rec;
}


/* Delete all of the refs information from INSN: the multiword hardreg
   records, the def and use chains, and finally the insn record
   itself.  Safe to call for an insn that was never scanned.  */

void
df_insn_refs_delete (struct dataflow *dflow, rtx insn)
{
  struct df *df = dflow->df;
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  struct df_ref *ref;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  /* UIDs past the table are insns created after the last grow.  */
  if (uid < df->insns_size)
    insn_info = DF_INSN_UID_GET (df, uid);

  if (insn_info)
    {
      struct df_mw_hardreg *hardregs = insn_info->mw_hardregs;

      /* Free each multiword hardreg record and its link list.  */
      while (hardregs)
	{
	  struct df_mw_hardreg *next_hr = hardregs->next;
	  struct df_link *link = hardregs->regs;
	  while (link)
	    {
	      struct df_link *next_l = link->next;
	      pool_free (problem_data->mw_link_pool, link);
	      link = next_l;
	    }

	  pool_free (problem_data->mw_reg_pool, hardregs);
	  hardregs = next_hr;
	}

      /* df_reg_chain_unlink returns the next ref in the insn's list.  */
      ref = insn_info->defs;
      while (ref)
	ref = df_reg_chain_unlink (dflow, ref);

      ref = insn_info->uses;
      while (ref)
	ref = df_reg_chain_unlink (dflow, ref);

      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_SET (df, insn, NULL);
    }
}


/* Delete all of the refs information from basic_block with BB_INDEX:
   every real insn's refs plus the block's artificial defs and uses.  */

void
df_bb_refs_delete (struct dataflow *dflow, int bb_index)
{
  struct df_ref *def;
  struct df_ref *use;

  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (dflow, bb_index);
  rtx insn;
  basic_block bb = BASIC_BLOCK (bb_index);
  FOR_BB_INSNS (bb, insn)
    {
      if (INSN_P (insn))
	{
	  /* Record defs within INSN.  */
	  df_insn_refs_delete (dflow, insn);
	}
    }

  /* Get rid of any artificial uses or defs.  */
  if (bb_info)
    {
      def = bb_info->artificial_defs;
      while (def)
	def = df_reg_chain_unlink (dflow, def);
      bb_info->artificial_defs = NULL;
      use = bb_info->artificial_uses;
      while (use)
	use = df_reg_chain_unlink (dflow, use);
      bb_info->artificial_uses = NULL;
    }
}


/* Delete all of the refs information from BLOCKS.
   */

void
df_refs_delete (struct dataflow *dflow, bitmap blocks)
{
  bitmap_iterator bi;
  unsigned int bb_index;

  /* Delete the refs of every block whose index is set in BLOCKS.  */
  EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi)
    {
      df_bb_refs_delete (dflow, bb_index);
    }
}


/* Take build ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

void
df_reorganize_refs (struct df_ref_info *ref_info)
{
  unsigned int m = ref_info->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int size = 0;

  /* Nothing to do if the table is already organized.  */
  if (ref_info->refs_organized)
    return;

  /* Make sure the flat refs array can hold everything; grow with 25%
     headroom, matching the growth policy used elsewhere here.  */
  if (ref_info->refs_size < ref_info->bitmap_size)
    {
      int new_size = ref_info->bitmap_size + ref_info->bitmap_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }

  for (regno = 0; regno < m; regno++)
    {
      struct df_reg_info *reg_info = ref_info->regs[regno];
      int count = 0;
      if (reg_info)
	{
	  struct df_ref *ref = reg_info->reg_chain;
	  /* The refs for REGNO occupy a contiguous slice starting at
	     OFFSET; renumber each ref's id to its new slot.  */
	  reg_info->begin = offset;
	  while (ref)
	    {
	      ref_info->refs[offset] = ref;
	      DF_REF_ID (ref) = offset++;
	      ref = DF_REF_NEXT_REG (ref);
	      count++;
	      size++;
	    }
	  reg_info->n_refs = count;
	}
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->bitmap_size = size;
  ref_info->refs_organized = true;
  ref_info->add_refs_inline = true;
}


/*----------------------------------------------------------------------------
   Hard core instruction scanning code.  No external interfaces here,
   just a lot of routines that look inside insns.
----------------------------------------------------------------------------*/

/* Create a ref and add it to the reg-def or reg-use chains.  */

static struct df_ref *
df_ref_create_structure (struct dataflow *dflow, rtx reg, rtx *loc,
			 basic_block bb, rtx insn,
			 enum df_ref_type ref_type,
			 enum df_ref_flags ref_flags)
{
  struct df_ref *this_ref;
  struct df *df = dflow->df;
  /* For a SUBREG, the register number comes from the inner reg.  */
  int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) dflow->problem_data;

  /* Allocate and fill in the new ref.  */
  this_ref = pool_alloc (problem_data->ref_pool);
  DF_REF_REG (this_ref) = reg;
  DF_REF_REGNO (this_ref) = regno;
  DF_REF_LOC (this_ref) = loc;
  DF_REF_INSN (this_ref) = insn;
  DF_REF_CHAIN (this_ref) = NULL;
  DF_REF_TYPE (this_ref) = ref_type;
  DF_REF_FLAGS (this_ref) = ref_flags;
  DF_REF_DATA (this_ref) = NULL;
  DF_REF_BB (this_ref) = bb;

  /* Link the ref into the reg_def and reg_use chains and keep a count
     of the instances.  */
  switch (ref_type)
    {
    case DF_REF_REG_DEF:
      {
	struct df_reg_info *reg_info = DF_REG_DEF_GET (df, regno);
	reg_info->n_refs++;

	/* Add the ref to the reg_def chain.  */
	df_reg_chain_create (reg_info, this_ref);
	DF_REF_ID (this_ref) = df->def_info.bitmap_size;
	if (df->def_info.add_refs_inline)
	  {
	    /* Grow the flat def array (25% headroom) if needed.  */
	    if (DF_DEFS_SIZE (df) >= df->def_info.refs_size)
	      {
		int new_size = df->def_info.bitmap_size
		  + df->def_info.bitmap_size / 4;
		df_grow_ref_info (&df->def_info, new_size);
	      }
	    /* Add the ref to the big array of defs.  */
	    DF_DEFS_SET (df, df->def_info.bitmap_size, this_ref);
	    df->def_info.refs_organized = false;
	  }

	df->def_info.bitmap_size++;

	if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
	  {
	    /* Artificial refs hang off the basic block, not an insn.  */
	    struct df_scan_bb_info *bb_info
	      = df_scan_get_bb_info (dflow, bb->index);
	    this_ref->next_ref = bb_info->artificial_defs;
	    bb_info->artificial_defs = this_ref;
	  }
	else
	  {
	    this_ref->next_ref = DF_INSN_GET (df, insn)->defs;
	    DF_INSN_GET (df, insn)->defs = this_ref;
	  }
      }
      break;

    case DF_REF_REG_MEM_LOAD:
    case DF_REF_REG_MEM_STORE:
    case DF_REF_REG_USE:
      {
	struct df_reg_info *reg_info = DF_REG_USE_GET (df, regno);
	reg_info->n_refs++;

	/* Add the ref to the reg_use chain.  */
	df_reg_chain_create (reg_info, this_ref);
	DF_REF_ID (this_ref) = df->use_info.bitmap_size;
	if (df->use_info.add_refs_inline)
	  {
	    if (DF_USES_SIZE (df) >= df->use_info.refs_size)
	      {
		int new_size = df->use_info.bitmap_size
		  + df->use_info.bitmap_size / 4;
		df_grow_ref_info (&df->use_info, new_size);
	      }
	    /* Add the ref to the big array of defs.  */
	    DF_USES_SET (df, df->use_info.bitmap_size, this_ref);
	    df->use_info.refs_organized = false;
	  }

	df->use_info.bitmap_size++;
	if (DF_REF_FLAGS (this_ref) & DF_REF_ARTIFICIAL)
	  {
	    /* Artificial refs hang off the basic block, not an insn.  */
	    struct df_scan_bb_info *bb_info
	      = df_scan_get_bb_info (dflow, bb->index);
	    this_ref->next_ref = bb_info->artificial_uses;
	    bb_info->artificial_uses = this_ref;
	  }
	else
	  {
	    this_ref->next_ref = DF_INSN_GET (df, insn)->uses;
	    DF_INSN_GET (df, insn)->uses = this_ref;
	  }
      }
      break;

    default:
      gcc_unreachable ();

    }
  return this_ref;
}


/* Create new references of type DF_REF_TYPE for each part of register REG
   at address LOC within INSN of BB.
   */

static void
df_ref_record (struct dataflow *dflow, rtx reg, rtx *loc,
	       basic_block bb, rtx insn,
	       enum df_ref_type ref_type,
	       enum df_ref_flags ref_flags,
	       bool record_live)
{
  struct df *df = dflow->df;
  rtx oldreg = reg;
  unsigned int regno;

  gcc_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);

  /* For the reg allocator we are interested in some SUBREG rtx's, but not
     all.  Notably only those representing a word extraction from a multi-word
     reg.  As written in the docu those should have the form
     (subreg:SI (reg:M A) N), with size(SImode) > size(Mmode).
     XXX Is that true?  We could also use the global word_mode variable.  */
  if ((dflow->flags & DF_SUBREGS) == 0
      && GET_CODE (reg) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (reg)) < GET_MODE_SIZE (word_mode)
	  || GET_MODE_SIZE (GET_MODE (reg))
	       >= GET_MODE_SIZE (GET_MODE (SUBREG_REG (reg)))))
    {
      /* Strip the SUBREG and record the ref against the inner reg.  */
      loc = &SUBREG_REG (reg);
      reg = *loc;
      ref_flags |= DF_REF_STRIPPED;
    }

  regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Hard register: one ref per hard register the mode spans.  */
      unsigned int i;
      unsigned int endregno;
      struct df_mw_hardreg *hardreg = NULL;
      struct df_scan_problem_data *problem_data
	= (struct df_scan_problem_data *) dflow->problem_data;

      if (!(dflow->flags & DF_HARD_REGS))
	return;

      /* GET_MODE (reg) is correct here.  We do not want to go into a SUBREG
	 for the mode, because we only want to add references to regs, which
	 are really referenced.  E.g., a (subreg:SI (reg:DI 0) 0) does _not_
	 reference the whole reg 0 in DI mode (which would also include
	 reg 1, at least, if 0 and 1 are SImode registers).  */
      endregno = hard_regno_nregs[regno][GET_MODE (reg)];
      if (GET_CODE (reg) == SUBREG)
	regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
				      SUBREG_BYTE (reg), GET_MODE (reg));
      endregno += regno;

      /* If this is a multiword hardreg, we create some extra datastructures that
	 will enable us to easily build REG_DEAD and REG_UNUSED notes.  */
      if ((endregno != regno + 1) && insn)
	{
	  struct df_insn_info *insn_info = DF_INSN_GET (df, insn);
	  /* Sets to a subreg of a multiword register are partial.
	     Sets to a non-subreg of a multiword register are not.  */
	  if (GET_CODE (oldreg) == SUBREG)
	    ref_flags |= DF_REF_PARTIAL;
	  ref_flags |= DF_REF_MW_HARDREG;
	  hardreg = pool_alloc (problem_data->mw_reg_pool);
	  hardreg->next = insn_info->mw_hardregs;
	  insn_info->mw_hardregs = hardreg;
	  hardreg->type = ref_type;
	  hardreg->flags = ref_flags;
	  hardreg->mw_reg = reg;
	  hardreg->regs = NULL;

	}

      for (i = regno; i < endregno; i++)
	{
	  struct df_ref *ref;

	  /* Calls are handled at call site because regs_ever_live
	     doesn't include clobbered regs, only used ones.  */
	  if (ref_type == DF_REF_REG_DEF && record_live)
	    regs_ever_live[i] = 1;
	  else if ((ref_type == DF_REF_REG_USE
		    || ref_type == DF_REF_REG_MEM_STORE
		    || ref_type == DF_REF_REG_MEM_LOAD)
		   && ((ref_flags & DF_REF_ARTIFICIAL) == 0))
	    {
	      /* Set regs_ever_live on uses of non-eliminable frame
		 pointers and arg pointers.  */
	      if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
		    && (regno == FRAME_POINTER_REGNUM
			|| regno == ARG_POINTER_REGNUM)))
		regs_ever_live[i] = 1;
	    }

	  ref = df_ref_create_structure (dflow, regno_reg_rtx[i], loc,
					 bb, insn, ref_type, ref_flags);
	  if (hardreg)
	    {
	      /* Chain this ref onto the multiword record's reg list.  */
	      struct df_link *link = pool_alloc (problem_data->mw_link_pool);

	      link->next = hardreg->regs;
	      link->ref = ref;
	      hardreg->regs = link;
	    }
	}
    }
  else
    {
      /* Pseudo register: exactly one ref.  */
      df_ref_create_structure (dflow, reg, loc,
			       bb, insn, ref_type, ref_flags);
    }
}


/* A set to a non-paradoxical SUBREG for which the number of word_mode units
   covered by the outer mode is smaller than that covered by the inner mode,
   is a read-modify-write operation.
   This function returns true iff the SUBREG X is such a SUBREG.
   */

bool
df_read_modify_subreg_p (rtx x)
{
  unsigned int isize, osize;
  if (GET_CODE (x) != SUBREG)
    return false;
  isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
  osize = GET_MODE_SIZE (GET_MODE (x));
  /* RMW iff the outer mode is strictly narrower than the inner mode
     and the inner mode is wider than one word.  */
  return (isize > osize && isize > UNITS_PER_WORD);
}


/* Process all the registers defined in the rtx, X.
   Autoincrement/decrement definitions will be picked up by
   df_uses_record.  */

static void
df_def_record_1 (struct dataflow *dflow, rtx x,
		 basic_block bb, rtx insn,
		 enum df_ref_flags flags, bool record_live)
{
  rtx *loc;
  rtx dst;
  bool dst_in_strict_lowpart = false;

  /* We may recursively call ourselves on EXPR_LIST when dealing with PARALLEL
     construct.  */
  if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
    loc = &XEXP (x, 0);
  else
    loc = &SET_DEST (x);
  dst = *loc;

  /* It is legal to have a set destination be a parallel.  */
  if (GET_CODE (dst) == PARALLEL)
    {
      int i;

      /* Recurse on each member; CLOBBER members are must-clobbers.  */
      for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
	{
	  rtx temp = XVECEXP (dst, 0, i);
	  if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
	      || GET_CODE (temp) == SET)
	    df_def_record_1 (dflow, temp, bb, insn,
			     GET_CODE (temp) == CLOBBER
			     ? flags | DF_REF_MUST_CLOBBER : flags,
			     record_live);
	}
      return;
    }

  /* Maybe, we should flag the use of STRICT_LOW_PART somehow.  It might
     be handy for the reg allocator.  */
  while (GET_CODE (dst) == STRICT_LOW_PART
	 || GET_CODE (dst) == ZERO_EXTRACT
	 || df_read_modify_subreg_p (dst))
    {
#if 0
      /* Strict low part always contains SUBREG, but we do not want to make
	 it appear outside, as whole register is always considered.  */
      if (GET_CODE (dst) == STRICT_LOW_PART)
	{
	  loc = &XEXP (dst, 0);
	  dst = *loc;
	}
#endif
      /* Peel the wrapper; such destinations read as well as write.  */
      loc = &XEXP (dst, 0);
      if (GET_CODE (dst) == STRICT_LOW_PART)
	dst_in_strict_lowpart = true;
      dst = *loc;
      flags |= DF_REF_READ_WRITE;

    }

  /* Sets to a subreg of a single word register are partial sets if
     they are wrapped in a strict lowpart, and not partial otherwise.
     */
  if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))
      && dst_in_strict_lowpart)
    flags |= DF_REF_PARTIAL;

  /* Only REG and (subreg (reg)) destinations produce a def ref.  */
  if (REG_P (dst)
      || (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst))))
    df_ref_record (dflow, dst, loc, bb, insn,
		   DF_REF_REG_DEF, flags, record_live);
}


/* Process all the registers defined in the pattern rtx, X.
   */

static void
df_defs_record (struct dataflow *dflow, rtx x, basic_block bb, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == SET || code == CLOBBER)
    {
      /* Mark the single def within the pattern.  */
      df_def_record_1 (dflow, x, bb, insn,
		       code == CLOBBER ? DF_REF_MUST_CLOBBER : 0, true);
    }
  else if (code == COND_EXEC)
    {
      /* Conditionally executed insn: scan the guarded pattern.  */
      df_defs_record (dflow, COND_EXEC_CODE (x), bb, insn);
    }
  else if (code == PARALLEL)
    {
      int i;

      /* Mark the multiple defs within the pattern.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	df_defs_record (dflow, XVECEXP (x, 0, i), bb, insn);
    }
}


/* Process all the registers used in the rtx at address LOC.  */

static void
df_uses_record (struct dataflow *dflow, rtx *loc, enum df_ref_type ref_type,
		basic_block bb, rtx insn, enum df_ref_flags flags)
{
  RTX_CODE code;
  rtx x;
 retry:
  x = *loc;
  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      /* Constants, labels and the like reference no registers.  */
      return;

    case CLOBBER:
      /* If we are clobbering a MEM, mark any registers inside the address
	 as being used.  */
      if (MEM_P (XEXP (x, 0)))
	df_uses_record (dflow, &XEXP (XEXP (x, 0), 0),
			DF_REF_REG_MEM_STORE, bb, insn, flags);

      /* If we're clobbering a REG then we have a def so ignore.  */
      return;

    case MEM:
      /* NOTE(review): only the DF_REF_IN_NOTE bit survives into the
	 address scan; other incoming flags are dropped here — looks
	 deliberate, confirm against the flag semantics in df.h.  */
      df_uses_record (dflow, &XEXP (x, 0), DF_REF_REG_MEM_LOAD, bb, insn,
		      flags & DF_REF_IN_NOTE);
      return;

    case SUBREG:
      /* While we're here, optimize this case.  */
      flags |= DF_REF_PARTIAL;
      /* In case the SUBREG is not of a REG, do not optimize.  */
      if (!REG_P (SUBREG_REG (x)))
	{
	  loc = &SUBREG_REG (x);
	  df_uses_record (dflow, loc, ref_type, bb, insn, flags);
	  return;
	}
      /* ... Fall through ...  */

    case REG:
      df_ref_record (dflow, x, loc, bb, insn, ref_type, flags, true);
      return;

    case SET:
      {
	rtx dst = SET_DEST (x);
	gcc_assert (!(flags & DF_REF_IN_NOTE));
	/* The source side is a plain use.  */
	df_uses_record (dflow, &SET_SRC (x), DF_REF_REG_USE, bb, insn, flags);

	/* The destination side may hide uses (addresses, extracts,
	   read-modify-write subregs).  */
	switch (GET_CODE (dst))
	  {
	  case SUBREG:
	    if (df_read_modify_subreg_p (dst))
	      {
		/* RMW subreg: the inner reg is read as well as written.  */
		df_uses_record (dflow, &SUBREG_REG (dst),
				DF_REF_REG_USE, bb,
				insn, flags | DF_REF_READ_WRITE);
		break;
	      }
	    /* Fall through.  */
	  case REG:
	  case PARALLEL:
	  case SCRATCH:
	  case PC:
	  case CC0:
	    break;
	  case MEM:
	    /* A store: the address registers are used.  */
	    df_uses_record (dflow, &XEXP (dst, 0),
			    DF_REF_REG_MEM_STORE,
			    bb, insn, flags);
	    break;
	  case STRICT_LOW_PART:
	    {
	      rtx *temp = &XEXP (dst, 0);
	      /* A strict_low_part uses the whole REG and not just the
		 SUBREG.  */
	      dst = XEXP (dst, 0);
	      df_uses_record (dflow,
			      (GET_CODE (dst) == SUBREG)
			      ? &SUBREG_REG (dst) : temp,
			      DF_REF_REG_USE, bb,
			      insn, DF_REF_READ_WRITE);
	    }
	    break;
	  case ZERO_EXTRACT:
	  case SIGN_EXTRACT:
	    /* The extracted-into reg is read-modify-write; the position
	       and width operands are ordinary uses.  */
	    df_uses_record (dflow, &XEXP (dst, 0),
			    DF_REF_REG_USE, bb, insn,
			    DF_REF_READ_WRITE);
	    df_uses_record (dflow, &XEXP (dst, 1),
			    DF_REF_REG_USE, bb, insn, flags);
	    df_uses_record (dflow, &XEXP (dst, 2),
			    DF_REF_REG_USE, bb, insn, flags);
	    dst = XEXP (dst, 0);
	    break;
	  default:
	    gcc_unreachable ();
	  }
	return;
      }

    case RETURN:
      break;

    case ASM_OPERANDS:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      {
	/* Traditional and volatile asm instructions must be
	   considered to use and clobber all hard registers, all
	   pseudo-registers and all of memory.  So must TRAP_IF and
	   UNSPEC_VOLATILE operations.

	   Consider for instance a volatile asm that changes the fpu
	   rounding mode.  An insn should not be moved across this
	   even if it only uses pseudo-regs because it might give an
	   incorrectly rounded result.

	   However, flow.c's liveness computation did *not* do this,
	   giving the reasoning as " ?!? Unfortunately, marking all
	   hard registers as live causes massive problems for the
	   register allocator and marking all pseudos as live creates
	   mountains of uninitialized variable warnings."

	   In order to maintain the status quo with regard to liveness
	   and uses, we do what flow.c did and just mark any regs we
	   can find in ASM_OPERANDS as used.  Later on, when liveness
	   is computed, asm insns are scanned and regs_asm_clobbered
	   is filled out.

	   For all ASM_OPERANDS, we must traverse the vector of input
	   operands.  We can not just fall through here since then we
	   would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
	   which do not indicate traditional asms unlike their normal
	   usage.  */
	if (code == ASM_OPERANDS)
	  {
	    int j;

	    for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
	      df_uses_record (dflow, &ASM_OPERANDS_INPUT (x, j),
			      DF_REF_REG_USE, bb, insn, flags);
	    return;
	  }
	break;
      }

    case PRE_DEC:
    case POST_DEC:
    case PRE_INC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Catch the def of the register being modified.  */
      flags |= DF_REF_READ_WRITE;
      df_ref_record (dflow, XEXP (x, 0), &XEXP (x, 0), bb, insn,
		     DF_REF_REG_DEF, flags, true);

      /* ... Fall through to handle uses ...  */

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */
  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
	if (fmt[i] == 'e')
	  {
	    /* Tail recursive case: save a function call level.  */
	    if (i == 0)
	      {
		loc = &XEXP (x, 0);
		goto retry;
	      }
	    df_uses_record (dflow, &XEXP (x, i), ref_type, bb, insn, flags);
	  }
	else if (fmt[i] == 'E')
	  {
	    int j;
	    for (j = 0; j < XVECLEN (x, i); j++)
	      df_uses_record (dflow, &XVECEXP (x, i, j), ref_type,
			      bb, insn, flags);
	  }
      }
  }
}

/* Return true if *LOC contains an asm.  Callback for for_each_rtx.  */

static int
df_insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  if ( !*loc)
    return 0;
  if (GET_CODE (*loc) == ASM_OPERANDS)
    return 1;
  return 0;
}


/* Return true if INSN contains an ASM.
   */

static int
df_insn_contains_asm (rtx insn)
{
  /* Walk every sub-rtx of INSN looking for an ASM_OPERANDS.  */
  return for_each_rtx (&insn, df_insn_contains_asm_1, NULL);
}



/* Record all the refs for DF within INSN of basic block BB.  */

static void
df_insn_refs_record (struct dataflow *dflow, basic_block bb, rtx insn)
{
  struct df *df = dflow->df;
  int i;

  if (INSN_P (insn))
    {
      rtx note;

      if (df_insn_contains_asm (insn))
	DF_INSN_CONTAINS_ASM (df, insn) = true;

      /* Record register defs.  */
      df_defs_record (dflow, PATTERN (insn), bb, insn);

      /* Optionally treat registers mentioned in REG_EQUIV/REG_EQUAL
	 notes as uses.  */
      if (dflow->flags & DF_EQUIV_NOTES)
	for (note = REG_NOTES (insn); note;
	     note = XEXP (note, 1))
	  {
	    switch (REG_NOTE_KIND (note))
	      {
	      case REG_EQUIV:
	      case REG_EQUAL:
		df_uses_record (dflow, &XEXP (note, 0), DF_REF_REG_USE,
				bb, insn, DF_REF_IN_NOTE);
		/* NOTE(review): no break here -- control falls into the
		   empty default, which is harmless but worth confirming
		   as intentional.  */
	      default:
		break;
	      }
	  }

      if (CALL_P (insn))
	{
	  rtx note;

	  /* Record the registers used to pass arguments, and explicitly
	     noted as clobbered.  */
	  for (note = CALL_INSN_FUNCTION_USAGE (insn); note;
	       note = XEXP (note, 1))
	    {
	      if (GET_CODE (XEXP (note, 0)) == USE)
		df_uses_record (dflow, &XEXP (XEXP (note, 0), 0),
				DF_REF_REG_USE,
				bb, insn, 0);
	      else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
		{
		  df_defs_record (dflow, XEXP (note, 0), bb, insn);
		  if (REG_P (XEXP (XEXP (note, 0), 0)))
		    {
		      /* An explicitly clobbered hard reg (and every
			 hard reg its mode spans) becomes ever-live.  */
		      rtx reg = XEXP (XEXP (note, 0), 0);
		      int regno_last;
		      int regno_first;
		      int i;

		      regno_last = regno_first = REGNO (reg);
		      if (regno_first < FIRST_PSEUDO_REGISTER)
			regno_last
			  += hard_regno_nregs[regno_first][GET_MODE (reg)] - 1;
		      for (i = regno_first; i <= regno_last; i++)
			regs_ever_live[i] = 1;
		    }
		}
	    }

	  /* The stack ptr is used (honorarily) by a CALL insn.  */
	  df_uses_record (dflow, &regno_reg_rtx[STACK_POINTER_REGNUM],
			  DF_REF_REG_USE, bb, insn,
			  0);

	  if (dflow->flags & DF_HARD_REGS)
	    {
	      bitmap_iterator bi;
	      unsigned int ui;
	      /* Calls may also reference any of the global registers,
		 so they are recorded as used.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  df_uses_record (dflow, &regno_reg_rtx[i],
				  DF_REF_REG_USE, bb, insn,
				  0);
	      /* Registers invalidated by the call are may-clobber defs,
		 and do not set regs_ever_live (record_live = false).  */
	      EXECUTE_IF_SET_IN_BITMAP (df_invalidated_by_call, 0, ui, bi)
		df_ref_record (dflow, regno_reg_rtx[ui], &regno_reg_rtx[ui], bb,
			       insn, DF_REF_REG_DEF, DF_REF_MAY_CLOBBER, false);
	    }
	}

      /* Record the register uses.  */
      df_uses_record (dflow, &PATTERN (insn),
		      DF_REF_REG_USE, bb, insn, 0);

    }
}

/* Return true iff BB has at least one incoming exception (EH) edge.  */

static bool
df_has_eh_preds (basic_block bb)
{
  edge e;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      if (e->flags & EDGE_EH)
	return true;
    }
  return false;
}

/* Record all the refs within the basic block BB.  */

static void
df_bb_refs_record (struct dataflow *dflow, basic_block bb)
{
  struct df *df = dflow->df;
  rtx insn;
  int luid = 0;
  struct df_scan_bb_info *bb_info = df_scan_get_bb_info (dflow, bb->index);
  bitmap artificial_uses_at_bottom = NULL;

  /* Artificial uses collected below are emitted in one pass at the end.  */
  if (dflow->flags & DF_HARD_REGS)
    artificial_uses_at_bottom = BITMAP_ALLOC (NULL);

  /* Need to make sure that there is a record in the basic block info.  */
  if (!bb_info)
    {
      bb_info = (struct df_scan_bb_info *) pool_alloc (dflow->block_pool);
      df_scan_set_bb_info (dflow, bb->index, bb_info);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  /* Scan the block an insn at a time from beginning to end.  */
  FOR_BB_INSNS (bb, insn)
    {
      df_insn_create_insn_record (dflow, insn);
      if (INSN_P (insn))
	{
	  /* Record defs within INSN.  */
	  DF_INSN_LUID (df, insn) = luid++;
	  df_insn_refs_record (dflow, bb, insn);
	}
      /* Non-insns (notes, labels) get the luid of the following insn.  */
      DF_INSN_LUID (df, insn) = luid;
    }

#ifdef EH_RETURN_DATA_REGNO
  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
      unsigned int i;
      /* Mark the registers that will contain data for the handler.  */
      for (i = 0; ; ++i)
	{
	  unsigned regno = EH_RETURN_DATA_REGNO (i);
	  if (regno == INVALID_REGNUM)
	    break;
	  df_ref_record (dflow, regno_reg_rtx[regno], &regno_reg_rtx[regno],
			 bb, NULL,
			 DF_REF_REG_DEF, DF_REF_ARTIFICIAL | DF_REF_AT_TOP,
			 false);
	}
    }
#endif


  if ((dflow->flags & DF_HARD_REGS)
      && df_has_eh_preds (bb))
    {
#ifdef EH_USES
      unsigned int i;
      /* This code is putting in a artificial ref for the use at the
	 TOP of the block that receives the exception.  It is too
	 cumbersome to actually put the ref on the edge.  We could
	 either model this at the top of the receiver block or the
	 bottom of the sender block.

	 The bottom of the sender block is problematic because not all
	 out-edges of the a block are eh-edges.  However, it is true
	 that all edges into a block are either eh-edges or none of
	 them are eh-edges.  Thus, we can model this at the top of the
	 eh-receiver for all of the edges at once.  */
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (EH_USES (i))
	  df_uses_record (dflow, &regno_reg_rtx[i],
			  DF_REF_REG_USE, bb, NULL,
			  DF_REF_ARTIFICIAL | DF_REF_AT_TOP);
#endif

      /* The following code (down thru the arg_pointer setting APPEARS
	 to be necessary because there is nothing that actually
	 describes what the exception handling code may actually need
	 to keep alive.  */
      if (reload_completed)
	{
	  if (frame_pointer_needed)
	    {
	      bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
	      bitmap_set_bit (artificial_uses_at_bottom, HARD_FRAME_POINTER_REGNUM);
#endif
	    }
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif
	}
    }

  if ((dflow->flags & DF_HARD_REGS)
      && bb->index >= NUM_FIXED_BLOCKS)
    {
      /* Before reload, there are a few registers that must be forced
	 live everywhere -- which might not already be the case for
	 blocks within infinite loops.  */
      if (!reload_completed)
	{

	  /* Any reference to any pseudo before reload is a potential
	     reference of the frame pointer.  */
	  bitmap_set_bit (artificial_uses_at_bottom, FRAME_POINTER_REGNUM);

#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  /* Pseudos with argument area equivalences may require
	     reloading via the argument pointer.  */
	  if (fixed_regs[ARG_POINTER_REGNUM])
	    bitmap_set_bit (artificial_uses_at_bottom, ARG_POINTER_REGNUM);
#endif

	  /* Any constant, or pseudo with constant equivalences, may
	     require reloading from memory using the pic register.  */
	  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	      && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	    bitmap_set_bit (artificial_uses_at_bottom, PIC_OFFSET_TABLE_REGNUM);
	}
      /* The all-important stack pointer must always be live.  */
      bitmap_set_bit (artificial_uses_at_bottom, STACK_POINTER_REGNUM);
    }

  if (dflow->flags & DF_HARD_REGS)
    {
      bitmap_iterator bi;
      unsigned int regno;

      /* Emit the collected artificial uses at the bottom of the block.  */
      EXECUTE_IF_SET_IN_BITMAP (artificial_uses_at_bottom, 0, regno, bi)
	{
	  df_uses_record (dflow, &regno_reg_rtx[regno],
			  DF_REF_REG_USE, bb, NULL, DF_REF_ARTIFICIAL);
	}

      BITMAP_FREE (artificial_uses_at_bottom);
    }
}


/* Record all the refs in the basic blocks specified by BLOCKS.
*/ 1765169689Skan 1766169689Skanstatic void 1767169689Skandf_refs_record (struct dataflow *dflow, bitmap blocks) 1768169689Skan{ 1769169689Skan unsigned int bb_index; 1770169689Skan bitmap_iterator bi; 1771169689Skan 1772169689Skan EXECUTE_IF_SET_IN_BITMAP (blocks, 0, bb_index, bi) 1773169689Skan { 1774169689Skan basic_block bb = BASIC_BLOCK (bb_index); 1775169689Skan df_bb_refs_record (dflow, bb); 1776169689Skan } 1777169689Skan 1778169689Skan if (bitmap_bit_p (blocks, EXIT_BLOCK)) 1779169689Skan df_record_exit_block_uses (dflow); 1780169689Skan 1781169689Skan if (bitmap_bit_p (blocks, ENTRY_BLOCK)) 1782169689Skan df_record_entry_block_defs (dflow); 1783169689Skan} 1784169689Skan 1785169689Skan 1786169689Skan/*---------------------------------------------------------------------------- 1787169689Skan Specialized hard register scanning functions. 1788169689Skan----------------------------------------------------------------------------*/ 1789169689Skan 1790169689Skan/* Mark a register in SET. Hard registers in large modes get all 1791169689Skan of their component registers set as well. */ 1792169689Skan 1793169689Skanstatic void 1794169689Skandf_mark_reg (rtx reg, void *vset) 1795169689Skan{ 1796169689Skan bitmap set = (bitmap) vset; 1797169689Skan int regno = REGNO (reg); 1798169689Skan 1799169689Skan gcc_assert (GET_MODE (reg) != BLKmode); 1800169689Skan 1801169689Skan bitmap_set_bit (set, regno); 1802169689Skan if (regno < FIRST_PSEUDO_REGISTER) 1803169689Skan { 1804169689Skan int n = hard_regno_nregs[regno][GET_MODE (reg)]; 1805169689Skan while (--n > 0) 1806169689Skan bitmap_set_bit (set, regno + n); 1807169689Skan } 1808169689Skan} 1809169689Skan 1810169689Skan 1811169689Skan/* Record the (conservative) set of hard registers that are defined on 1812169689Skan entry to the function. 
*/ 1813169689Skan 1814169689Skanstatic void 1815169689Skandf_record_entry_block_defs (struct dataflow *dflow) 1816169689Skan{ 1817169689Skan unsigned int i; 1818169689Skan bitmap_iterator bi; 1819169689Skan rtx r; 1820169689Skan struct df *df = dflow->df; 1821169689Skan 1822169689Skan bitmap_clear (df->entry_block_defs); 1823169689Skan 1824169689Skan if (!(dflow->flags & DF_HARD_REGS)) 1825169689Skan return; 1826169689Skan 1827169689Skan for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1828169689Skan { 1829169689Skan if (FUNCTION_ARG_REGNO_P (i)) 1830169689Skan#ifdef INCOMING_REGNO 1831169689Skan bitmap_set_bit (df->entry_block_defs, INCOMING_REGNO (i)); 1832169689Skan#else 1833169689Skan bitmap_set_bit (df->entry_block_defs, i); 1834169689Skan#endif 1835169689Skan } 1836169689Skan 1837169689Skan /* Once the prologue has been generated, all of these registers 1838169689Skan should just show up in the first regular block. */ 1839169689Skan if (HAVE_prologue && epilogue_completed) 1840169689Skan { 1841169689Skan /* Defs for the callee saved registers are inserted so that the 1842169689Skan pushes have some defining location. */ 1843169689Skan for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1844169689Skan if ((call_used_regs[i] == 0) && (regs_ever_live[i])) 1845169689Skan bitmap_set_bit (df->entry_block_defs, i); 1846169689Skan } 1847169689Skan else 1848169689Skan { 1849169689Skan /* The always important stack pointer. */ 1850169689Skan bitmap_set_bit (df->entry_block_defs, STACK_POINTER_REGNUM); 1851169689Skan 1852169689Skan#ifdef INCOMING_RETURN_ADDR_RTX 1853169689Skan if (REG_P (INCOMING_RETURN_ADDR_RTX)) 1854169689Skan bitmap_set_bit (df->entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX)); 1855169689Skan#endif 1856169689Skan 1857169689Skan /* If STATIC_CHAIN_INCOMING_REGNUM == STATIC_CHAIN_REGNUM 1858169689Skan only STATIC_CHAIN_REGNUM is defined. If they are different, 1859169689Skan we only care about the STATIC_CHAIN_INCOMING_REGNUM. 
*/ 1860169689Skan#ifdef STATIC_CHAIN_INCOMING_REGNUM 1861169689Skan bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_INCOMING_REGNUM); 1862169689Skan#else 1863169689Skan#ifdef STATIC_CHAIN_REGNUM 1864169689Skan bitmap_set_bit (df->entry_block_defs, STATIC_CHAIN_REGNUM); 1865169689Skan#endif 1866169689Skan#endif 1867169689Skan 1868169689Skan r = TARGET_STRUCT_VALUE_RTX (current_function_decl, true); 1869169689Skan if (r && REG_P (r)) 1870169689Skan bitmap_set_bit (df->entry_block_defs, REGNO (r)); 1871169689Skan } 1872169689Skan 1873169689Skan if ((!reload_completed) || frame_pointer_needed) 1874169689Skan { 1875169689Skan /* Any reference to any pseudo before reload is a potential 1876169689Skan reference of the frame pointer. */ 1877169689Skan bitmap_set_bit (df->entry_block_defs, FRAME_POINTER_REGNUM); 1878169689Skan#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 1879169689Skan /* If they are different, also mark the hard frame pointer as live. */ 1880169689Skan if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM)) 1881169689Skan bitmap_set_bit (df->entry_block_defs, HARD_FRAME_POINTER_REGNUM); 1882169689Skan#endif 1883169689Skan } 1884169689Skan 1885169689Skan /* These registers are live everywhere. */ 1886169689Skan if (!reload_completed) 1887169689Skan { 1888169689Skan#ifdef EH_USES 1889169689Skan /* The ia-64, the only machine that uses this, does not define these 1890169689Skan until after reload. */ 1891169689Skan for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1892169689Skan if (EH_USES (i)) 1893169689Skan { 1894169689Skan bitmap_set_bit (df->entry_block_defs, i); 1895169689Skan } 1896169689Skan#endif 1897169689Skan 1898169689Skan#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM 1899169689Skan /* Pseudos with argument area equivalences may require 1900169689Skan reloading via the argument pointer. 
*/ 1901169689Skan if (fixed_regs[ARG_POINTER_REGNUM]) 1902169689Skan bitmap_set_bit (df->entry_block_defs, ARG_POINTER_REGNUM); 1903169689Skan#endif 1904169689Skan 1905169689Skan#ifdef PIC_OFFSET_TABLE_REGNUM 1906169689Skan /* Any constant, or pseudo with constant equivalences, may 1907169689Skan require reloading from memory using the pic register. */ 1908169689Skan if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM 1909169689Skan && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) 1910169689Skan bitmap_set_bit (df->entry_block_defs, PIC_OFFSET_TABLE_REGNUM); 1911169689Skan#endif 1912169689Skan } 1913169689Skan 1914169689Skan targetm.live_on_entry (df->entry_block_defs); 1915169689Skan 1916169689Skan EXECUTE_IF_SET_IN_BITMAP (df->entry_block_defs, 0, i, bi) 1917169689Skan { 1918169689Skan df_ref_record (dflow, regno_reg_rtx[i], ®no_reg_rtx[i], 1919169689Skan ENTRY_BLOCK_PTR, NULL, 1920169689Skan DF_REF_REG_DEF, DF_REF_ARTIFICIAL , false); 1921169689Skan } 1922169689Skan} 1923169689Skan 1924169689Skan 1925169689Skan/* Record the set of hard registers that are used in the exit block. */ 1926169689Skan 1927169689Skanstatic void 1928169689Skandf_record_exit_block_uses (struct dataflow *dflow) 1929169689Skan{ 1930169689Skan unsigned int i; 1931169689Skan bitmap_iterator bi; 1932169689Skan struct df *df = dflow->df; 1933169689Skan 1934169689Skan bitmap_clear (df->exit_block_uses); 1935169689Skan 1936169689Skan if (!(dflow->flags & DF_HARD_REGS)) 1937169689Skan return; 1938169689Skan 1939169689Skan /* If exiting needs the right stack value, consider the stack 1940169689Skan pointer live at the end of the function. 
*/ 1941169689Skan if ((HAVE_epilogue && epilogue_completed) 1942169689Skan || !EXIT_IGNORE_STACK 1943169689Skan || (!FRAME_POINTER_REQUIRED 1944169689Skan && !current_function_calls_alloca 1945169689Skan && flag_omit_frame_pointer) 1946169689Skan || current_function_sp_is_unchanging) 1947169689Skan { 1948169689Skan bitmap_set_bit (df->exit_block_uses, STACK_POINTER_REGNUM); 1949169689Skan } 1950169689Skan 1951169689Skan /* Mark the frame pointer if needed at the end of the function. 1952169689Skan If we end up eliminating it, it will be removed from the live 1953169689Skan list of each basic block by reload. */ 1954169689Skan 1955169689Skan if ((!reload_completed) || frame_pointer_needed) 1956169689Skan { 1957169689Skan bitmap_set_bit (df->exit_block_uses, FRAME_POINTER_REGNUM); 1958169689Skan#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM 1959169689Skan /* If they are different, also mark the hard frame pointer as live. */ 1960169689Skan if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM)) 1961169689Skan bitmap_set_bit (df->exit_block_uses, HARD_FRAME_POINTER_REGNUM); 1962169689Skan#endif 1963169689Skan } 1964169689Skan 1965169689Skan#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED 1966169689Skan /* Many architectures have a GP register even without flag_pic. 1967169689Skan Assume the pic register is not in use, or will be handled by 1968169689Skan other means, if it is not fixed. */ 1969169689Skan if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM 1970169689Skan && fixed_regs[PIC_OFFSET_TABLE_REGNUM]) 1971169689Skan bitmap_set_bit (df->exit_block_uses, PIC_OFFSET_TABLE_REGNUM); 1972169689Skan#endif 1973169689Skan 1974169689Skan /* Mark all global registers, and all registers used by the 1975169689Skan epilogue as being live at the end of the function since they 1976169689Skan may be referenced by our caller. 
*/ 1977169689Skan for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1978169689Skan if (global_regs[i] || EPILOGUE_USES (i)) 1979169689Skan bitmap_set_bit (df->exit_block_uses, i); 1980169689Skan 1981169689Skan if (HAVE_epilogue && epilogue_completed) 1982169689Skan { 1983169689Skan /* Mark all call-saved registers that we actually used. */ 1984169689Skan for (i = 0; i < FIRST_PSEUDO_REGISTER; i++) 1985169689Skan if (regs_ever_live[i] && !LOCAL_REGNO (i) 1986169689Skan && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i)) 1987169689Skan bitmap_set_bit (df->exit_block_uses, i); 1988169689Skan } 1989169689Skan 1990169689Skan#ifdef EH_RETURN_DATA_REGNO 1991169689Skan /* Mark the registers that will contain data for the handler. */ 1992169689Skan if (reload_completed && current_function_calls_eh_return) 1993169689Skan for (i = 0; ; ++i) 1994169689Skan { 1995169689Skan unsigned regno = EH_RETURN_DATA_REGNO (i); 1996169689Skan if (regno == INVALID_REGNUM) 1997169689Skan break; 1998169689Skan bitmap_set_bit (df->exit_block_uses, regno); 1999169689Skan } 2000169689Skan#endif 2001169689Skan 2002169689Skan#ifdef EH_RETURN_STACKADJ_RTX 2003169689Skan if ((!HAVE_epilogue || ! epilogue_completed) 2004169689Skan && current_function_calls_eh_return) 2005169689Skan { 2006169689Skan rtx tmp = EH_RETURN_STACKADJ_RTX; 2007169689Skan if (tmp && REG_P (tmp)) 2008169689Skan df_mark_reg (tmp, df->exit_block_uses); 2009169689Skan } 2010169689Skan#endif 2011169689Skan 2012169689Skan#ifdef EH_RETURN_HANDLER_RTX 2013169689Skan if ((!HAVE_epilogue || ! epilogue_completed) 2014169689Skan && current_function_calls_eh_return) 2015169689Skan { 2016169689Skan rtx tmp = EH_RETURN_HANDLER_RTX; 2017169689Skan if (tmp && REG_P (tmp)) 2018169689Skan df_mark_reg (tmp, df->exit_block_uses); 2019169689Skan } 2020169689Skan#endif 2021169689Skan 2022169689Skan /* Mark function return value. 
*/ 2023169689Skan diddle_return_value (df_mark_reg, (void*) df->exit_block_uses); 2024169689Skan 2025169689Skan if (dflow->flags & DF_HARD_REGS) 2026169689Skan EXECUTE_IF_SET_IN_BITMAP (df->exit_block_uses, 0, i, bi) 2027169689Skan df_uses_record (dflow, ®no_reg_rtx[i], 2028169689Skan DF_REF_REG_USE, EXIT_BLOCK_PTR, NULL, 2029169689Skan DF_REF_ARTIFICIAL); 2030169689Skan} 2031169689Skan 2032169689Skanstatic bool initialized = false; 2033169689Skan 2034169689Skan/* Initialize some platform specific structures. */ 2035169689Skan 2036169689Skanvoid 2037169689Skandf_hard_reg_init (void) 2038169689Skan{ 2039169689Skan int i; 2040169689Skan#ifdef ELIMINABLE_REGS 2041169689Skan static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS; 2042169689Skan#endif 2043169689Skan /* After reload, some ports add certain bits to regs_ever_live so 2044169689Skan this cannot be reset. */ 2045169689Skan 2046169689Skan if (!reload_completed) 2047169689Skan memset (regs_ever_live, 0, sizeof (regs_ever_live)); 2048169689Skan 2049169689Skan if (initialized) 2050169689Skan return; 2051169689Skan 2052169689Skan bitmap_obstack_initialize (&persistent_obstack); 2053169689Skan 2054169689Skan /* Record which registers will be eliminated. We use this in 2055169689Skan mark_used_regs. */ 2056169689Skan CLEAR_HARD_REG_SET (elim_reg_set); 2057169689Skan 2058169689Skan#ifdef ELIMINABLE_REGS 2059169689Skan for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++) 2060169689Skan SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from); 2061169689Skan#else 2062169689Skan SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM); 2063169689Skan#endif 2064169689Skan 2065169689Skan df_invalidated_by_call = BITMAP_ALLOC (&persistent_obstack); 2066169689Skan 2067169689Skan /* Inconveniently, this is only readily available in hard reg set 2068169689Skan form. 
*/ 2069169689Skan for (i = 0; i < FIRST_PSEUDO_REGISTER; ++i) 2070169689Skan if (TEST_HARD_REG_BIT (regs_invalidated_by_call, i)) 2071169689Skan bitmap_set_bit (df_invalidated_by_call, i); 2072169689Skan 2073169689Skan initialized = true; 2074169689Skan} 2075