/* tree-ssa-alias.c revision 259405 */
/* Alias analysis for trees.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "timevar.h"
#include "expr.h"
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
#include "tree-gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "tree-ssa-structalias.h"
#include "convert.h"
#include "params.h"
#include "ipa-type-escape.h"
#include "vec.h"
#include "bitmap.h"
#include "vecprim.h"
#include "pointer-set.h"

/* Obstack used to hold grouping bitmaps and other temporary bitmaps used by
   aliasing.  */
static bitmap_obstack alias_obstack;

/* 'true' after aliases have been computed (see compute_may_aliases).  */
bool aliases_computed_p;

/* Structure to map a variable to its alias set and keep track of the
   virtual operands that will be needed to represent it.  */
struct alias_map_d
{
  /* Variable and its alias set.  */
  tree var;
  HOST_WIDE_INT set;

  /* Total number of virtual operands that will be needed to represent
     all the aliases of VAR.  */
  long total_alias_vops;

  /* Nonzero if the aliases for this memory tag have been grouped
     already.  Used in group_aliases.  */
  unsigned int grouped_p : 1;

  /* Set of variables aliased with VAR.  This is the exact same
     information contained in VAR_ANN (VAR)->MAY_ALIASES, but in
     bitmap form to speed up alias grouping.  */
  bitmap may_aliases;
};


/* Counters used to display statistics on alias analysis.  */
struct alias_stats_d
{
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;
  unsigned int simple_queries;
  unsigned int simple_resolved;
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};


/* Local variables.  */
static struct alias_stats_d alias_stats;

/* Local functions.
   */
static void compute_flow_insensitive_aliasing (struct alias_info *);
static void finalize_ref_all_pointers (struct alias_info *);
static void dump_alias_stats (FILE *);
static bool may_alias_p (tree, HOST_WIDE_INT, tree, HOST_WIDE_INT, bool);
static tree create_memory_tag (tree type, bool is_type_tag);
static tree get_tmt_for (tree, struct alias_info *);
static tree get_nmt_for (tree);
static void add_may_alias (tree, tree);
static void replace_may_alias (tree, size_t, tree);
static struct alias_info *init_alias_info (void);
static void delete_alias_info (struct alias_info *);
static void compute_flow_sensitive_aliasing (struct alias_info *);
static void setup_pointers_and_addressables (struct alias_info *);
static void create_global_var (void);
static void maybe_create_global_var (struct alias_info *ai);
static void group_aliases (struct alias_info *);
static void set_pt_anything (tree ptr);

/* Global declarations.  */

/* Call clobbered variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) is call-clobbered.  */
bitmap call_clobbered_vars;

/* Addressable variables in the function.  If bit I is set, then
   REFERENCED_VARS (I) has had its address taken.  Note that
   CALL_CLOBBERED_VARS and ADDRESSABLE_VARS are not related.  An
   addressable variable is not necessarily call-clobbered (e.g., a
   local addressable whose address does not escape) and not all
   call-clobbered variables are addressable (e.g., a local static
   variable).  */
bitmap addressable_vars;

/* When the program has too many call-clobbered variables and call-sites,
   this variable is used to represent the clobbering effects of function
   calls.  In these cases, all the call clobbered variables in the program
   are forced to alias this variable.  This reduces compile times by not
   having to keep track of too many V_MAY_DEF expressions at call sites.
*/ 139tree global_var; 140 141/* qsort comparison function to sort type/name tags by DECL_UID. */ 142 143static int 144sort_tags_by_id (const void *pa, const void *pb) 145{ 146 tree a = *(tree *)pa; 147 tree b = *(tree *)pb; 148 149 return DECL_UID (a) - DECL_UID (b); 150} 151 152/* Initialize WORKLIST to contain those memory tags that are marked call 153 clobbered. Initialized WORKLIST2 to contain the reasons these 154 memory tags escaped. */ 155 156static void 157init_transitive_clobber_worklist (VEC (tree, heap) **worklist, 158 VEC (int, heap) **worklist2) 159{ 160 referenced_var_iterator rvi; 161 tree curr; 162 163 FOR_EACH_REFERENCED_VAR (curr, rvi) 164 { 165 if (MTAG_P (curr) && is_call_clobbered (curr)) 166 { 167 VEC_safe_push (tree, heap, *worklist, curr); 168 VEC_safe_push (int, heap, *worklist2, var_ann (curr)->escape_mask); 169 } 170 } 171} 172 173/* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if 174 ALIAS is not already marked call clobbered, and is a memory 175 tag. */ 176 177static void 178add_to_worklist (tree alias, VEC (tree, heap) **worklist, 179 VEC (int, heap) **worklist2, 180 int reason) 181{ 182 if (MTAG_P (alias) && !is_call_clobbered (alias)) 183 { 184 VEC_safe_push (tree, heap, *worklist, alias); 185 VEC_safe_push (int, heap, *worklist2, reason); 186 } 187} 188 189/* Mark aliases of TAG as call clobbered, and place any tags on the 190 alias list that were not already call clobbered on WORKLIST. 
*/ 191 192static void 193mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist, 194 VEC (int, heap) **worklist2) 195{ 196 unsigned int i; 197 VEC (tree, gc) *ma; 198 tree entry; 199 var_ann_t ta = var_ann (tag); 200 201 if (!MTAG_P (tag)) 202 return; 203 ma = may_aliases (tag); 204 if (!ma) 205 return; 206 207 for (i = 0; VEC_iterate (tree, ma, i, entry); i++) 208 { 209 if (!unmodifiable_var_p (entry)) 210 { 211 add_to_worklist (entry, worklist, worklist2, ta->escape_mask); 212 mark_call_clobbered (entry, ta->escape_mask); 213 } 214 } 215} 216 217/* Tags containing global vars need to be marked as global. 218 Tags containing call clobbered vars need to be marked as call 219 clobbered. */ 220 221static void 222compute_tag_properties (void) 223{ 224 referenced_var_iterator rvi; 225 tree tag; 226 bool changed = true; 227 VEC (tree, heap) *taglist = NULL; 228 229 FOR_EACH_REFERENCED_VAR (tag, rvi) 230 { 231 if (!MTAG_P (tag) || TREE_CODE (tag) == STRUCT_FIELD_TAG) 232 continue; 233 VEC_safe_push (tree, heap, taglist, tag); 234 } 235 236 /* We sort the taglist by DECL_UID, for two reasons. 237 1. To get a sequential ordering to make the bitmap accesses 238 faster. 239 2. Because of the way we compute aliases, it's more likely that 240 an earlier tag is included in a later tag, and this will reduce 241 the number of iterations. 242 243 If we had a real tag graph, we would just topo-order it and be 244 done with it. */ 245 qsort (VEC_address (tree, taglist), 246 VEC_length (tree, taglist), 247 sizeof (tree), 248 sort_tags_by_id); 249 250 /* Go through each tag not marked as global, and if it aliases 251 global vars, mark it global. 252 253 If the tag contains call clobbered vars, mark it call 254 clobbered. 255 256 This loop iterates because tags may appear in the may-aliases 257 list of other tags when we group. 
*/ 258 259 while (changed) 260 { 261 unsigned int k; 262 263 changed = false; 264 for (k = 0; VEC_iterate (tree, taglist, k, tag); k++) 265 { 266 VEC (tree, gc) *ma; 267 unsigned int i; 268 tree entry; 269 bool tagcc = is_call_clobbered (tag); 270 bool tagglobal = MTAG_GLOBAL (tag); 271 272 if (tagcc && tagglobal) 273 continue; 274 275 ma = may_aliases (tag); 276 if (!ma) 277 continue; 278 279 for (i = 0; VEC_iterate (tree, ma, i, entry); i++) 280 { 281 /* Call clobbered entries cause the tag to be marked 282 call clobbered. */ 283 if (!tagcc && is_call_clobbered (entry)) 284 { 285 mark_call_clobbered (tag, var_ann (entry)->escape_mask); 286 tagcc = true; 287 changed = true; 288 } 289 290 /* Global vars cause the tag to be marked global. */ 291 if (!tagglobal && is_global_var (entry)) 292 { 293 MTAG_GLOBAL (tag) = true; 294 changed = true; 295 tagglobal = true; 296 } 297 298 /* Early exit once both global and cc are set, since the 299 loop can't do any more than that. */ 300 if (tagcc && tagglobal) 301 break; 302 } 303 } 304 } 305 VEC_free (tree, heap, taglist); 306} 307 308/* Set up the initial variable clobbers and globalness. 309 When this function completes, only tags whose aliases need to be 310 clobbered will be set clobbered. Tags clobbered because they 311 contain call clobbered vars are handled in compute_tag_properties. 
   */

static void
set_initial_properties (struct alias_info *ai)
{
  unsigned int i;
  referenced_var_iterator rvi;
  tree var;
  tree ptr;

  /* Pass 1: globals without subvars are intrinsically call clobbered;
     pointer PARM_DECLs with a default def escape by virtue of being
     parameters.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_global_var (var)
	  && (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL))
	{
	  if (!unmodifiable_var_p (var))
	    mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
	}
      else if (TREE_CODE (var) == PARM_DECL
	       && default_def (var)
	       && POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  tree def = default_def (var);
	  get_ptr_info (def)->value_escapes_p = 1;
	  get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
	}
    }

  /* Pass 2: propagate escape information from each processed pointer
     to its memory tags and pointed-to variables.  */
  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));

      if (pi->value_escapes_p)
	{
	  /* If PTR escapes then its associated memory tags and
	     pointed-to variables are call-clobbered.  */
	  if (pi->name_mem_tag)
	    mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);

	  if (v_ann->symbol_mem_tag)
	    mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);

	  if (pi->pt_vars)
	    {
	      bitmap_iterator bi;
	      unsigned int j;
	      EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
		if (!unmodifiable_var_p (referenced_var (j)))
		  mark_call_clobbered (referenced_var (j), pi->escape_mask);
	    }
	}

      /* If the name tag is call clobbered, so is the symbol tag
	 associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
	  && v_ann->symbol_mem_tag
	  && is_call_clobbered (pi->name_mem_tag))
	mark_call_clobbered (v_ann->symbol_mem_tag, pi->escape_mask);

      /* Name tags and symbol tags that we don't know where they point
	 to, might point to global memory, and thus, are clobbered.

	 FIXME:  This is not quite right.  They should only be
	 clobbered if value_escapes_p is true, regardless of whether
	 they point to global memory or not.
	 So removing this code and fixing all the bugs would be nice.
	 It is the cause of a bunch of clobbering.  */
      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->is_dereferenced && pi->name_mem_tag)
	{
	  mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (pi->name_mem_tag) = true;
	}

      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->is_dereferenced
	  && v_ann->symbol_mem_tag)
	{
	  mark_call_clobbered (v_ann->symbol_mem_tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (v_ann->symbol_mem_tag) = true;
	}
    }
}


/* This variable is set to true if we are updating the used alone
   information for SMTs, or are in a pass that is going to break it
   temporarily.  */
bool updating_used_alone;

/* Compute which variables need to be marked call clobbered because
   their tag is call clobbered, and which tags need to be marked
   global because they contain global variables.  */

static void
compute_call_clobbered (struct alias_info *ai)
{
  VEC (tree, heap) *worklist = NULL;
  VEC (int, heap) *worklist2 = NULL;

  /* Seed the worklist with the tags already known to be clobbered,
     then propagate clobberedness transitively through may-aliases.
     WORKLIST2 carries the escape reason parallel to WORKLIST.  */
  set_initial_properties (ai);
  init_transitive_clobber_worklist (&worklist, &worklist2);
  while (VEC_length (tree, worklist) != 0)
    {
      tree curr = VEC_pop (tree, worklist);
      int reason = VEC_pop (int, worklist2);

      mark_call_clobbered (curr, reason);
      mark_aliases_call_clobbered (curr, &worklist, &worklist2);
    }
  VEC_free (tree, heap, worklist);
  VEC_free (int, heap, worklist2);
  compute_tag_properties ();
}


/* Helper for recalculate_used_alone.  Return a conservatively correct
   answer as to whether STMT may make a store on the LHS to SYM.
   */

static bool
lhs_may_store_to (tree stmt, tree sym ATTRIBUTE_UNUSED)
{
  tree lhs = TREE_OPERAND (stmt, 0);

  lhs = get_base_address (lhs);

  /* No base address: be conservative and say "no store".  */
  if (!lhs)
    return false;

  /* A store to an SSA name never hits memory.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    return false;
  /* We could do better here by looking at the type tag of LHS, but it
     is unclear whether this is worth it.  */
  return true;
}

/* Recalculate the used_alone information for SMTs.  */

void
recalculate_used_alone (void)
{
  VEC (tree, heap) *calls = NULL;
  block_stmt_iterator bsi;
  basic_block bb;
  tree stmt;
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  /* First, reset all the SMT used alone bits to zero, remembering the
     previous value so we only rename when the bit changes.  */
  updating_used_alone = true;
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
      {
	SMT_OLD_USED_ALONE (var) = SMT_USED_ALONE (var);
	SMT_USED_ALONE (var) = 0;
      }

  /* Walk all the statements.
     Calls get put into a list of statements to update, since we will
     need to update operands on them if we make any changes.
     If we see a bare use of a SMT anywhere in a real virtual use or virtual
     def, mark the SMT as used alone, and for renaming.  */
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  bool iscall = false;
	  ssa_op_iter iter;

	  stmt = bsi_stmt (bsi);

	  if (TREE_CODE (stmt) == CALL_EXPR
	      || (TREE_CODE (stmt) == MODIFY_EXPR
		  && TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR))
	    {
	      iscall = true;
	      VEC_safe_push (tree, heap, calls, stmt);
	    }

	  FOR_EACH_SSA_TREE_OPERAND (var, stmt, iter,
				     SSA_OP_VUSE | SSA_OP_VIRTUAL_DEFS)
	    {
	      tree svar = var;

	      if (TREE_CODE (var) == SSA_NAME)
		svar = SSA_NAME_VAR (var);

	      if (TREE_CODE (svar) == SYMBOL_MEMORY_TAG)
		{
		  /* We only care about the LHS on calls.  */
		  if (iscall && !lhs_may_store_to (stmt, svar))
		    continue;

		  if (!SMT_USED_ALONE (svar))
		    {
		      SMT_USED_ALONE (svar) = true;

		      /* Only need to mark for renaming if it wasn't
			 used alone before.  */
		      if (!SMT_OLD_USED_ALONE (svar))
			mark_sym_for_renaming (svar);
		    }
		}
	    }
	}
    }

  /* Update the operands on all the calls we saw.  */
  if (calls)
    {
      for (i = 0; VEC_iterate (tree, calls, i, stmt); i++)
	update_stmt (stmt);
    }

  /* We need to mark SMT's that are no longer used for renaming so the
     symbols go away, or else verification will be angry with us, even
     though they are dead.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
      {
	if (SMT_OLD_USED_ALONE (var) && !SMT_USED_ALONE (var))
	  mark_sym_for_renaming (var);
      }

  VEC_free (tree, heap, calls);
  updating_used_alone = false;
}

/* Compute may-alias information for every variable referenced in function
   FNDECL.

   Alias analysis proceeds in 3 main phases:

   1- Points-to and escape analysis.

   This phase walks the use-def chains in the SSA web looking for three
   things:

	* Assignments of the form P_i = &VAR
	* Assignments of the form P_i = malloc()
	* Pointers and ADDR_EXPR that escape the current function.

   The concept of 'escaping' is the same one used in the Java world.  When
   a pointer or an ADDR_EXPR escapes, it means that it has been exposed
   outside of the current function.  So, assignment to global variables,
   function arguments and returning a pointer are all escape sites, as are
   conversions between pointers and integers.

   This is where we are currently limited.  Since not everything is renamed
   into SSA, we lose track of escape properties when a pointer is stashed
   inside a field in a structure, for instance.  In those cases, we are
   assuming that the pointer does escape.
566 567 We use escape analysis to determine whether a variable is 568 call-clobbered. Simply put, if an ADDR_EXPR escapes, then the variable 569 is call-clobbered. If a pointer P_i escapes, then all the variables 570 pointed-to by P_i (and its memory tag) also escape. 571 572 2- Compute flow-sensitive aliases 573 574 We have two classes of memory tags. Memory tags associated with the 575 pointed-to data type of the pointers in the program. These tags are 576 called "symbol memory tag" (SMT). The other class are those associated 577 with SSA_NAMEs, called "name memory tag" (NMT). The basic idea is that 578 when adding operands for an INDIRECT_REF *P_i, we will first check 579 whether P_i has a name tag, if it does we use it, because that will have 580 more precise aliasing information. Otherwise, we use the standard symbol 581 tag. 582 583 In this phase, we go through all the pointers we found in points-to 584 analysis and create alias sets for the name memory tags associated with 585 each pointer P_i. If P_i escapes, we mark call-clobbered the variables 586 it points to and its tag. 587 588 589 3- Compute flow-insensitive aliases 590 591 This pass will compare the alias set of every symbol memory tag and 592 every addressable variable found in the program. Given a symbol 593 memory tag SMT and an addressable variable V. If the alias sets of 594 SMT and V conflict (as computed by may_alias_p), then V is marked 595 as an alias tag and added to the alias set of SMT. 596 597 For instance, consider the following function: 598 599 foo (int i) 600 { 601 int *p, a, b; 602 603 if (i > 10) 604 p = &a; 605 else 606 p = &b; 607 608 *p = 3; 609 a = b + 2; 610 return *p; 611 } 612 613 After aliasing analysis has finished, the symbol memory tag for pointer 614 'p' will have two aliases, namely variables 'a' and 'b'. Every time 615 pointer 'p' is dereferenced, we want to mark the operation as a 616 potential reference to 'a' and 'b'. 

     foo (int i)
     {
       int *p, a, b;

       if (i_2 > 10)
	 p_4 = &a;
       else
	 p_6 = &b;
       # p_1 = PHI <p_4(1), p_6(2)>;

       # a_7 = V_MAY_DEF <a_3>;
       # b_8 = V_MAY_DEF <b_5>;
       *p_1 = 3;

       # a_9 = V_MAY_DEF <a_7>
       # VUSE <b_8>
       a_9 = b_8 + 2;

       # VUSE <a_9>;
       # VUSE <b_8>;
       return *p_1;
     }

   In certain cases, the list of may aliases for a pointer may grow too
   large.  This may cause an explosion in the number of virtual operands
   inserted in the code.  Resulting in increased memory consumption and
   compilation time.

   When the number of virtual operands needed to represent aliased
   loads and stores grows too large (configurable with @option{--param
   max-aliased-vops}), alias sets are grouped to avoid severe
   compile-time slow downs and memory consumption.  See group_aliases.  */

static unsigned int
compute_may_aliases (void)
{
  struct alias_info *ai;

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if its passed as a
     function call argument).  */
  compute_points_to_sets (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  Note that this pass needs to be done before flow
     insensitive analysis because it uses the points-to information
     gathered before to mark call-clobbered symbol tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* Determine if we need to enable alias grouping.  */
  if (ai->total_alias_vops >= MAX_ALIASED_VOPS)
    group_aliases (ai);

  /* If the program has too many call-clobbered variables and/or function
     calls, create .GLOBAL_VAR and use it to model call-clobbering
     semantics at call sites.  This reduces the number of virtual operands
     considerably, improving compile times at the expense of lost
     aliasing precision.  */
  maybe_create_global_var (ai);

  /* If the program contains ref-all pointers, finalize may-alias information
     for them.  This pass needs to be run after call-clobbering information
     has been computed.  */
  if (ai->ref_all_symbol_mem_tag)
    finalize_ref_all_pointers (ai);

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_referenced_vars (dump_file);
      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);
      dump_points_to_info (dump_file);
      dump_alias_info (dump_file);
    }

  /* Report strict aliasing violations.  */
  strict_aliasing_warning_backend ();

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  /* Re-scan modified statements and refresh the used-alone bits on
     symbol memory tags now that aliasing has changed.  */
  updating_used_alone = true;
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      {
	for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	  {
	    update_stmt_if_modified (bsi_stmt (bsi));
	  }
      }
  }
  recalculate_used_alone ();
  updating_used_alone = false;
  return 0;
}


struct tree_opt_pass pass_may_alias =
{
  "alias",				/* name */
  NULL,					/* gate */
  compute_may_aliases,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_TREE_MAY_ALIAS,			/* tv_id */
  PROP_cfg | PROP_ssa,			/* properties_required */
  PROP_alias,				/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func | TODO_update_ssa
    | TODO_ggc_collect | TODO_verify_ssa
    | TODO_verify_stmts,		/* todo_flags_finish */
  0					/* letter */
};


/* Data structure used to count the number of dereferences to PTR
   inside an expression.  */
struct count_ptr_d
{
  tree ptr;
  unsigned count;
};


/* Helper for count_uses_and_derefs.  Called by walk_tree to look for
   (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA.  */

static tree
count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
{
  struct count_ptr_d *count_p = (struct count_ptr_d *) data;

  /* Do not walk inside ADDR_EXPR nodes.  In the expression &ptr->fld,
     pointer 'ptr' is *not* dereferenced, it is simply used to compute
     the address of 'fld' as 'ptr + offsetof(fld)'.  */
  if (TREE_CODE (*tp) == ADDR_EXPR)
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
    count_p->count++;

  return NULL_TREE;
}


/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.
 The two counts are stored in *NUM_USES_P and
   *NUM_DEREFS_P respectively.  *IS_STORE_P is set to 'true' if at
   least one of those dereferences is a store operation.  */

void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
		       unsigned *num_derefs_p, bool *is_store)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_derefs_p = 0;
  *is_store = false;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (TREE_CODE (stmt) == MODIFY_EXPR
      || (TREE_CODE (stmt) == RETURN_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == MODIFY_EXPR)
      || TREE_CODE (stmt) == ASM_EXPR
      || TREE_CODE (stmt) == CALL_EXPR)
    {
      tree lhs, rhs;

      /* Split STMT into its LHS and RHS so each side can be walked
	 separately; derefs found on the LHS count as stores.  */
      if (TREE_CODE (stmt) == MODIFY_EXPR)
	{
	  lhs = TREE_OPERAND (stmt, 0);
	  rhs = TREE_OPERAND (stmt, 1);
	}
      else if (TREE_CODE (stmt) == RETURN_EXPR)
	{
	  tree e = TREE_OPERAND (stmt, 0);
	  lhs = TREE_OPERAND (e, 0);
	  rhs = TREE_OPERAND (e, 1);
	}
      else if (TREE_CODE (stmt) == ASM_EXPR)
	{
	  lhs = ASM_OUTPUTS (stmt);
	  rhs = ASM_INPUTS (stmt);
	}
      else
	{
	  lhs = NULL_TREE;
	  rhs = stmt;
	}

      if (lhs && (TREE_CODE (lhs) == TREE_LIST || EXPR_P (lhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&lhs, count_ptr_derefs, &count, NULL);
	  *is_store = true;
	  *num_derefs_p = count.count;
	}

      if (rhs && (TREE_CODE (rhs) == TREE_LIST || EXPR_P (rhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&rhs, count_ptr_derefs, &count, NULL);
	  *num_derefs_p += count.count;
	}
    }

  gcc_assert (*num_uses_p >= *num_derefs_p);
}

/* Initialize the data structures used for alias analysis.  */

static struct alias_info *
init_alias_info (void)
{
  struct alias_info *ai;
  referenced_var_iterator rvi;
  tree var;

  bitmap_obstack_initialize (&alias_obstack);
  ai = XCNEW (struct alias_info);
  ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
  sbitmap_zero (ai->ssa_names_visited);
  ai->processed_ptrs = VEC_alloc (tree, heap, 50);
  ai->written_vars = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_store = BITMAP_ALLOC (&alias_obstack);
  ai->dereferenced_ptrs_load = BITMAP_ALLOC (&alias_obstack);

  /* If aliases have been computed before, clear existing information.  */
  if (aliases_computed_p)
    {
      unsigned i;

      /* Similarly, clear the set of addressable variables.  In this
	 case, we can just clear the set because addressability is
	 only computed here.  */
      bitmap_clear (addressable_vars);

      /* Clear flow-insensitive alias information from each symbol.  */
      FOR_EACH_REFERENCED_VAR (var, rvi)
	{
	  var_ann_t ann = var_ann (var);

	  ann->is_aliased = 0;
	  ann->may_aliases = NULL;
	  NUM_REFERENCES_CLEAR (ann);

	  /* Since we are about to re-discover call-clobbered
	     variables, clear the call-clobbered flag.  Variables that
	     are intrinsically call-clobbered (globals, local statics,
	     etc) will not be marked by the aliasing code, so we can't
	     remove them from CALL_CLOBBERED_VARS.

	     NB: STRUCT_FIELDS are still call clobbered if they are for
	     a global variable, so we *don't* clear their call clobberedness
	     just because they are tags, though we will clear it if they
	     aren't for global variables.  */
	  if (TREE_CODE (var) == NAME_MEMORY_TAG
	      || TREE_CODE (var) == SYMBOL_MEMORY_TAG
	      || !is_global_var (var))
	    clear_call_clobbered (var);
	}

      /* Clear flow-sensitive points-to information from each SSA name.  */
      for (i = 1; i < num_ssa_names; i++)
	{
	  tree name = ssa_name (i);

	  if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	    continue;

	  if (SSA_NAME_PTR_INFO (name))
	    {
	      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	      /* Clear all the flags but keep the name tag to
		 avoid creating new temporaries unnecessarily.  If
		 this pointer is found to point to a subset or
		 superset of its former points-to set, then a new
		 tag will need to be created in create_name_tags.  */
	      pi->pt_anything = 0;
	      pi->pt_null = 0;
	      pi->value_escapes_p = 0;
	      pi->is_dereferenced = 0;
	      if (pi->pt_vars)
		bitmap_clear (pi->pt_vars);
	    }
	}
    }

  /* Next time, we will need to reset alias information.  */
  aliases_computed_p = true;

  return ai;
}


/* Deallocate memory used by alias analysis.  */

static void
delete_alias_info (struct alias_info *ai)
{
  size_t i;
  referenced_var_iterator rvi;
  tree var;

  sbitmap_free (ai->ssa_names_visited);
  VEC_free (tree, heap, ai->processed_ptrs);

  for (i = 0; i < ai->num_addressable_vars; i++)
    free (ai->addressable_vars[i]);

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      var_ann_t ann = var_ann (var);
      NUM_REFERENCES_CLEAR (ann);
    }

  free (ai->addressable_vars);

  for (i = 0; i < ai->num_pointers; i++)
    free (ai->pointers[i]);
  free (ai->pointers);

  BITMAP_FREE (ai->written_vars);
  BITMAP_FREE (ai->dereferenced_ptrs_store);
  BITMAP_FREE (ai->dereferenced_ptrs_load);
  bitmap_obstack_release (&alias_obstack);
  free (ai);

  delete_points_to_sets ();
}

/* Used for hashing to identify pointer infos with identical
   pt_vars bitmaps.  */
static int
eq_ptr_info (const void *p1, const void *p2)
{
  const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
  const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
  return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
}

/* Hash a ptr_info_def by its pt_vars bitmap; consistent with
   eq_ptr_info above.  */
static hashval_t
ptr_info_hash (const void *p)
{
  const struct ptr_info_def *n = (const struct ptr_info_def *) p;
  return bitmap_hash (n->pt_vars);
}

/* Create name tags for all the pointers that have been dereferenced.
   We only create a name tag for a pointer P if P is found to point to
   a set of variables (so that we can alias them to *P) or if it is
   the result of a call to malloc (which means that P cannot point to
   anything else nor alias any other variable).

   If two pointers P and Q point to the same set of variables, they
   are assigned the same name tag.
   */

static void
create_name_tags (void)
{
  size_t i;
  VEC (tree, heap) *with_ptvars = NULL;
  tree ptr;
  htab_t ptr_hash;

  /* Collect the list of pointers with a non-empty points to set.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (!ptr
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || !SSA_NAME_PTR_INFO (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);

      if (pi->pt_anything || !pi->is_dereferenced)
	{
	  /* No name tags for pointers that have not been
	     dereferenced or point to an arbitrary location.  */
	  pi->name_mem_tag = NULL_TREE;
	  continue;
	}

      /* Set pt_anything on the pointers without pt_vars filled in so
	 that they are assigned a symbol tag.  */
      if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
	VEC_safe_push (tree, heap, with_ptvars, ptr);
      else
	set_pt_anything (ptr);
    }

  /* If we didn't find any pointers with pt_vars set, we're done.  */
  if (!with_ptvars)
    return;

  ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);

  /* Now go through the pointers with pt_vars, and find a name tag
     with the same pt_vars as this pointer, or create one if one
     doesn't exist.  */
  for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      tree old_name_tag = pi->name_mem_tag;
      struct ptr_info_def **slot;

      /* If PTR points to a set of variables, check if we don't
	 have another pointer Q with the same points-to set before
	 creating a tag.  If so, use Q's tag instead of creating a
	 new one.

	 This is important for not creating unnecessary symbols
	 and also for copy propagation.  If we ever need to
	 propagate PTR into Q or vice-versa, we would run into
	 problems if they both had different name tags because
	 they would have different SSA version numbers (which
	 would force us to take the name tags in and out of SSA).  */
      slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
      if (*slot)
	pi->name_mem_tag = (*slot)->name_mem_tag;
      else
	{
	  *slot = pi;
	  /* If we didn't find a pointer with the same points-to set
	     as PTR, create a new name tag if needed.  */
	  if (pi->name_mem_tag == NULL_TREE)
	    pi->name_mem_tag = get_nmt_for (ptr);
	}

      /* If the new name tag computed for PTR is different than
	 the old name tag that it used to have, then the old tag
	 needs to be removed from the IL, so we mark it for
	 renaming.  */
      if (old_name_tag && old_name_tag != pi->name_mem_tag)
	mark_sym_for_renaming (old_name_tag);

      /* Propagate volatility of the pointed-to type to the tag.  */
      TREE_THIS_VOLATILE (pi->name_mem_tag)
	|= TREE_THIS_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));

      /* Mark the new name tag for renaming.  */
      mark_sym_for_renaming (pi->name_mem_tag);
    }
  htab_delete (ptr_hash);

  VEC_free (tree, heap, with_ptvars);
}


/* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
   the name memory tag (NMT) associated with P_i.  If P_i escapes, then its
   name tag and the variables it points-to are call-clobbered.  Finally, if
   P_i escapes and we could not determine where it points to, then all the
   variables in the same alias set as *P_i are marked call-clobbered.  This
   is necessary because we must assume that P_i may take the address of any
   variable in the same alias set.
*/

static void
compute_flow_sensitive_aliasing (struct alias_info *ai)
{
  size_t i;
  tree ptr;

  /* First resolve the points-to set of every processed pointer;
     pointers whose set cannot be determined degrade to "points
     anywhere".  */
  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      if (!find_what_p_points_to (ptr))
	set_pt_anything (ptr);
    }

  create_name_tags ();

  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      unsigned j;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      var_ann_t v_ann = var_ann (SSA_NAME_VAR (ptr));
      bitmap_iterator bi;

      /* Set up aliasing information for PTR's name memory tag (if it has
	 one).  Note that only pointers that have been dereferenced will
	 have a name memory tag.  Each pointed-to variable is added both
	 to the name tag and to the pointer's symbol memory tag.  */
      if (pi->name_mem_tag && pi->pt_vars)
	EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, j, bi)
	  {
	    add_may_alias (pi->name_mem_tag, referenced_var (j));
	    add_may_alias (v_ann->symbol_mem_tag, referenced_var (j));
	  }
    }
}


/* Compute type-based alias sets.  Traverse all the pointers and
   addressable variables found in setup_pointers_and_addressables.

   For every pointer P in AI->POINTERS and addressable variable V in
   AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
   memory tag (SMT) if their alias sets conflict.  V is then marked as
   an alias tag so that the operand scanner knows that statements
   containing V have aliased operands.  */

static void
compute_flow_insensitive_aliasing (struct alias_info *ai)
{
  size_t i;

  /* Initialize counter for the total number of virtual operands that
     aliasing will introduce.  When AI->TOTAL_ALIAS_VOPS goes beyond the
     threshold set by --params max-alias-vops, we enable alias
     grouping.  */
  ai->total_alias_vops = 0;

  /* For every pointer P, determine which addressable variables may alias
     with P's symbol memory tag.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map = ai->pointers[i];
      tree tag = var_ann (p_map->var)->symbol_mem_tag;
      var_ann_t tag_ann = var_ann (tag);
      tree var;

      /* Call-clobbering information is not finalized yet at this point.
	 Ref-all pointers are handled later by
	 finalize_ref_all_pointers.  */
      if (PTR_IS_REF_ALL (p_map->var))
	continue;

      p_map->total_alias_vops = 0;
      p_map->may_aliases = BITMAP_ALLOC (&alias_obstack);

      /* Add any pre-existing may_aliases to the bitmap used to represent
	 TAG's alias set in case we need to group aliases.  */
      for (j = 0; VEC_iterate (tree, tag_ann->may_aliases, j, var); ++j)
	bitmap_set_bit (p_map->may_aliases, DECL_UID (var));

      for (j = 0; j < ai->num_addressable_vars; j++)
	{
	  struct alias_map_d *v_map;
	  var_ann_t v_ann;
	  bool tag_stored_p, var_stored_p;

	  v_map = ai->addressable_vars[j];
	  var = v_map->var;
	  v_ann = var_ann (var);

	  /* Skip memory tags and variables that have never been
	     written to.  We also need to check if the variables are
	     call-clobbered because they may be overwritten by
	     function calls.

	     Note this is effectively random accessing elements in
	     the sparse bitset, which can be highly inefficient.
	     So we first check the call_clobbered status of the
	     tag and variable before querying the bitmap.  */
	  tag_stored_p = is_call_clobbered (tag)
	    || bitmap_bit_p (ai->written_vars, DECL_UID (tag));
	  var_stored_p = is_call_clobbered (var)
	    || bitmap_bit_p (ai->written_vars, DECL_UID (var));
	  if (!tag_stored_p && !var_stored_p)
	    continue;

	  if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
	    {
	      size_t num_tag_refs, num_var_refs;

	      num_tag_refs = NUM_REFERENCES (tag_ann);
	      num_var_refs = NUM_REFERENCES (v_ann);

	      /* Add VAR to TAG's may-aliases set.  */

	      /* We should never have a var with subvars here, because
		 they shouldn't get into the set of addressable vars */
	      gcc_assert (!var_can_have_subvars (var)
			  || get_subvars_for_var (var) == NULL);

	      add_may_alias (tag, var);
	      /* Update the bitmap used to represent TAG's alias set
		 in case we need to group aliases.  */
	      bitmap_set_bit (p_map->may_aliases, DECL_UID (var));

	      /* Update the total number of virtual operands due to
		 aliasing.  Since we are adding one more alias to TAG's
		 may-aliases set, the total number of virtual operands due
		 to aliasing will be increased by the number of references
		 made to VAR and TAG (every reference to TAG will also
		 count as a reference to VAR).  */
	      ai->total_alias_vops += (num_var_refs + num_tag_refs);
	      p_map->total_alias_vops += (num_var_refs + num_tag_refs);
	    }
	}
    }

  /* Since this analysis is based exclusively on symbols, it fails to
     handle cases where two pointers P and Q have different memory
     tags with conflicting alias set numbers but no aliased symbols in
     common.

     For example, suppose that we have two memory tags SMT.1 and SMT.2
     such that

     		may-aliases (SMT.1) = { a }
		may-aliases (SMT.2) = { b }

     and the alias set number of SMT.1 conflicts with that of SMT.2.
     Since they don't have symbols in common, loads and stores from
     SMT.1 and SMT.2 will seem independent of each other, which will
     lead to the optimizers making invalid transformations (see
     testsuite/gcc.c-torture/execute/pr15262-[12].c).

     To avoid this problem, we do a final traversal of AI->POINTERS
     looking for pairs of pointers that have no aliased symbols in
     common and yet have conflicting alias set numbers.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      struct alias_map_d *p_map1 = ai->pointers[i];
      tree tag1 = var_ann (p_map1->var)->symbol_mem_tag;
      bitmap may_aliases1 = p_map1->may_aliases;

      if (PTR_IS_REF_ALL (p_map1->var))
	continue;

      /* Only unordered pairs need checking, hence J starts at I+1.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  struct alias_map_d *p_map2 = ai->pointers[j];
	  tree tag2 = var_ann (p_map2->var)->symbol_mem_tag;
	  bitmap may_aliases2 = p_map2->may_aliases;

	  if (PTR_IS_REF_ALL (p_map2->var))
	    continue;

	  /* If the pointers may not point to each other, do nothing.  */
	  if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
	    continue;

	  /* The two pointers may alias each other.  If they already have
	     symbols in common, do nothing.  */
	  if (bitmap_intersect_p (may_aliases1, may_aliases2))
	    continue;

	  if (!bitmap_empty_p (may_aliases2))
	    {
	      unsigned int k;
	      bitmap_iterator bi;

	      /* Add all the aliases for TAG2 into TAG1's alias set.
		 FIXME, update grouping heuristic counters.  */
	      EXECUTE_IF_SET_IN_BITMAP (may_aliases2, 0, k, bi)
		add_may_alias (tag1, referenced_var (k));
	      bitmap_ior_into (may_aliases1, may_aliases2);
	    }
	  else
	    {
	      /* Since TAG2 does not have any aliases of its own, add
		 TAG2 itself to the alias set of TAG1.  */
	      add_may_alias (tag1, tag2);
	      bitmap_set_bit (may_aliases1, DECL_UID (tag2));
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file, "\n%s: Total number of aliased vops: %ld\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops);
}


/* Finalize may-alias information for ref-all pointers.  Traverse all
   the addressable variables found in setup_pointers_and_addressables.
   If flow-sensitive alias analysis has attached a name memory tag to
   a ref-all pointer, we will use it for the dereferences because that
   will have more precise aliasing information.  But if there is no
   name tag, we will use a special symbol tag that aliases all the
   call-clobbered addressable variables.  */

static void
finalize_ref_all_pointers (struct alias_info *ai)
{
  size_t i;

  /* When .GLOBAL_VAR exists it already stands for every
     call-clobbered variable, so aliasing it is sufficient.  */
  if (global_var)
    add_may_alias (ai->ref_all_symbol_mem_tag, global_var);
  else
    {
      /* First add the real call-clobbered variables.  */
      for (i = 0; i < ai->num_addressable_vars; i++)
	{
	  tree var = ai->addressable_vars[i]->var;
	  if (is_call_clobbered (var))
	    add_may_alias (ai->ref_all_symbol_mem_tag, var);
	}

      /* Then add the call-clobbered pointer memory tags.  See
	 compute_flow_insensitive_aliasing for the rationale.  */
      for (i = 0; i < ai->num_pointers; i++)
	{
	  tree ptr = ai->pointers[i]->var, tag;
	  if (PTR_IS_REF_ALL (ptr))
	    continue;
	  tag = var_ann (ptr)->symbol_mem_tag;
	  if (is_call_clobbered (tag))
	    add_may_alias (ai->ref_all_symbol_mem_tag, tag);
	}
    }
}


/* Comparison function for qsort used in group_aliases.  Sorts alias
   maps by their contributed virtual operand count, largest first.  */

static int
total_alias_vops_cmp (const void *p, const void *q)
{
  const struct alias_map_d **p1 = (const struct alias_map_d **)p;
  const struct alias_map_d **p2 = (const struct alias_map_d **)q;
  long n1 = (*p1)->total_alias_vops;
  long n2 = (*p2)->total_alias_vops;

  /* We want to sort in descending order.  */
  return (n1 > n2 ? -1 : (n1 == n2) ? 0 : 1);
}

/* Group all the aliases for TAG to make TAG represent all the
   variables in its alias set.  Update the total number
   of virtual operands due to aliasing (AI->TOTAL_ALIAS_VOPS).  This
   function will make TAG be the unique alias tag for all the
   variables in its may-aliases.  So, given:

   	may-aliases(TAG) = { V1, V2, V3 }

   This function will group the variables into:

   	may-aliases(V1) = { TAG }
	may-aliases(V2) = { TAG }
	may-aliases(V3) = { TAG }  */

static void
group_aliases_into (tree tag, bitmap tag_aliases, struct alias_info *ai)
{
  unsigned int i;
  var_ann_t tag_ann = var_ann (tag);
  size_t num_tag_refs = NUM_REFERENCES (tag_ann);
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (tag_aliases, 0, i, bi)
    {
      tree var = referenced_var (i);
      var_ann_t ann = var_ann (var);

      /* Make TAG the unique alias of VAR.  */
      ann->is_aliased = 0;
      ann->may_aliases = NULL;

      /* Note that VAR and TAG may be the same if the function has no
	 addressable variables (see the discussion at the end of
	 setup_pointers_and_addressables).  */
      if (var != tag)
	add_may_alias (var, tag);

      /* Reduce total number of virtual operands contributed
	 by TAG on behalf of VAR.  Notice that the references to VAR
	 itself won't be removed.  We will merely replace them with
	 references to TAG.  */
      ai->total_alias_vops -= num_tag_refs;
    }

  /* We have reduced the number of virtual operands that TAG makes on
     behalf of all the variables formerly aliased with it.  However,
     we have also "removed" all the virtual operands for TAG itself,
     so we add them back.  */
  ai->total_alias_vops += num_tag_refs;

  /* TAG no longer has any aliases.  */
  tag_ann->may_aliases = NULL;
}


/* Group may-aliases sets to reduce the number of virtual operands due
   to aliasing.

     1- Sort the list of pointers in decreasing number of contributed
	virtual operands.

     2- Take the first entry in AI->POINTERS and revert the role of
	the memory tag and its aliases.  Usually, whenever an aliased
	variable Vi is found to alias with a memory tag T, we add Vi
	to the may-aliases set for T.  Meaning that after alias
	analysis, we will have:

		may-aliases(T) = { V1, V2, V3, ..., Vn }

	This means that every statement that references T, will get 'n'
	virtual operands for each of the Vi tags.  But, when alias
	grouping is enabled, we make T an alias tag and add it to the
	alias set of all the Vi variables:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		...
		may-aliases(Vn) = { T }

	This has two effects: (a) statements referencing T will only get
	a single virtual operand, and, (b) all the variables Vi will now
	appear to alias each other.  So, we lose alias precision to
	improve compile time.  But, in theory, a program with such a high
	level of aliasing should not be very optimizable in the first
	place.

     3- Since variables may be in the alias set of more than one
	memory tag, the grouping done in step (2) needs to be extended
	to all the memory tags that have a non-empty intersection with
	the may-aliases set of tag T.  For instance, if we originally
	had these may-aliases sets:

		may-aliases(T) = { V1, V2, V3 }
		may-aliases(R) = { V2, V4 }

	In step (2) we would have reverted the aliases for T as:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		may-aliases(V3) = { T }

	But note that now V2 is no longer aliased with R.  We could
	add R to may-aliases(V2), but we are in the process of
	grouping aliases to reduce virtual operands so what we do is
	add V4 to the grouping to obtain:

		may-aliases(V1) = { T }
		may-aliases(V2) = { T }
		may-aliases(V3) = { T }
		may-aliases(V4) = { T }

     4- If the total number of virtual operands due to aliasing is
	still above the threshold set by max-alias-vops, go back to (2).
*/

static void
group_aliases (struct alias_info *ai)
{
  size_t i;
  tree ptr;

  /* Sort the POINTERS array in descending order of contributed
     virtual operands.  */
  qsort (ai->pointers, ai->num_pointers, sizeof (struct alias_map_d *),
	 total_alias_vops_cmp);

  /* For every pointer in AI->POINTERS, reverse the roles of its tag
     and the tag's may-aliases set.  */
  for (i = 0; i < ai->num_pointers; i++)
    {
      size_t j;
      tree tag1 = var_ann (ai->pointers[i]->var)->symbol_mem_tag;
      bitmap tag1_aliases = ai->pointers[i]->may_aliases;

      /* Skip tags that have been grouped already.  */
      if (ai->pointers[i]->grouped_p)
	continue;

      /* See if TAG1 had any aliases in common with other symbol tags.
	 If we find a TAG2 with common aliases with TAG1, add TAG2's
	 aliases into TAG1.  */
      for (j = i + 1; j < ai->num_pointers; j++)
	{
	  bitmap tag2_aliases = ai->pointers[j]->may_aliases;

	  if (bitmap_intersect_p (tag1_aliases, tag2_aliases))
	    {
	      tree tag2 = var_ann (ai->pointers[j]->var)->symbol_mem_tag;

	      bitmap_ior_into (tag1_aliases, tag2_aliases);

	      /* TAG2 does not need its aliases anymore.  */
	      bitmap_clear (tag2_aliases);
	      var_ann (tag2)->may_aliases = NULL;

	      /* TAG1 is the unique alias of TAG2.  */
	      add_may_alias (tag2, tag1);

	      ai->pointers[j]->grouped_p = true;
	    }
	}

      /* Now group all the aliases we collected into TAG1.  */
      group_aliases_into (tag1, tag1_aliases, ai);

      /* If we've reduced total number of virtual operands below the
	 threshold, stop.  */
      if (ai->total_alias_vops < MAX_ALIASED_VOPS)
	break;
    }

  /* Finally, all the variables that have been grouped cannot be in
     the may-alias set of name memory tags.  Suppose that we have
     grouped the aliases in this code so that may-aliases(a) = SMT.20

     	p_5 = &a;
	...
	# a_9 = V_MAY_DEF <a_8>
	p_5->field = 0
	... Several modifications to SMT.20 ...
	# VUSE <a_9>
	x_30 = p_5->field

     Since p_5 points to 'a', the optimizers will try to propagate 0
     into p_5->field, but that is wrong because there have been
     modifications to 'SMT.20' in between.  To prevent this we have to
     replace 'a' with 'SMT.20' in the name tag of p_5.  */
  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      size_t j;
      tree name_tag = SSA_NAME_PTR_INFO (ptr)->name_mem_tag;
      VEC(tree,gc) *aliases;
      tree alias;

      if (name_tag == NULL_TREE)
	continue;

      aliases = var_ann (name_tag)->may_aliases;
      for (j = 0; VEC_iterate (tree, aliases, j, alias); j++)
	{
	  var_ann_t ann = var_ann (alias);

	  /* A grouped variable (non-tag, or a structure field tag)
	     has exactly one alias left: the symbol tag it was grouped
	     into.  Substitute that tag for the variable.  */
	  if ((!MTAG_P (alias)
	       || TREE_CODE (alias) == STRUCT_FIELD_TAG)
	      && ann->may_aliases)
	    {
	      tree new_alias;

	      gcc_assert (VEC_length (tree, ann->may_aliases) == 1);

	      new_alias = VEC_index (tree, ann->may_aliases, 0);
	      replace_may_alias (name_tag, j, new_alias);
	    }
	}
    }

  if (dump_file)
    fprintf (dump_file,
	     "%s: Total number of aliased vops after grouping: %ld%s\n",
	     get_name (current_function_decl),
	     ai->total_alias_vops,
	     (ai->total_alias_vops < 0) ? " (negative values are OK)" : "");
}


/* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS.  */

static void
create_alias_map_for (tree var, struct alias_info *ai)
{
  struct alias_map_d *alias_map;
  alias_map = XCNEW (struct alias_map_d);
  alias_map->var = var;
  alias_map->set = get_alias_set (var);
  ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
}


/* Create memory tags for all the dereferenced pointers and build the
   ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
   sets.
   Based on the address escape and points-to information collected
   earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
   variables whose address is not needed anymore.  */

static void
setup_pointers_and_addressables (struct alias_info *ai)
{
  size_t n_vars, num_addressable_vars, num_pointers;
  referenced_var_iterator rvi;
  tree var;
  VEC (tree, heap) *varvec = NULL;
  safe_referenced_var_iterator srvi;

  /* Size up the arrays ADDRESSABLE_VARS and POINTERS.  */
  num_addressable_vars = num_pointers = 0;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (may_be_aliased (var))
	num_addressable_vars++;

      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  /* Since we don't keep track of volatile variables, assume that
	     these pointers are used in indirect store operations.  */
	  if (TREE_THIS_VOLATILE (var))
	    bitmap_set_bit (ai->dereferenced_ptrs_store, DECL_UID (var));

	  num_pointers++;
	}
    }

  /* Create ADDRESSABLE_VARS and POINTERS.  Note that these arrays are
     always going to be slightly bigger than we actually need them
     because some TREE_ADDRESSABLE variables will be marked
     non-addressable below and only pointers with unique symbol tags are
     going to be added to POINTERS.  */
  ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
  ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
  ai->num_addressable_vars = 0;
  ai->num_pointers = 0;

  /* Since we will be creating symbol memory tags within this loop,
     cache the value of NUM_REFERENCED_VARS to avoid processing the
     additional tags unnecessarily.  */
  n_vars = num_referenced_vars;

  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
    {
      var_ann_t v_ann = var_ann (var);
      subvar_t svars;

      /* Name memory tags already have flow-sensitive aliasing
	 information, so they need not be processed by
	 compute_flow_insensitive_aliasing.  Similarly, symbol memory
	 tags are already accounted for when we process their
	 associated pointer.

	 Structure fields, on the other hand, have to have some of this
	 information processed for them, but it's pointless to mark them
	 non-addressable (since they are fake variables anyway).  */
      if (MTAG_P (var) && TREE_CODE (var) != STRUCT_FIELD_TAG)
	continue;

      /* Remove the ADDRESSABLE flag from every addressable variable whose
	 address is not needed anymore.  This is caused by the propagation
	 of ADDR_EXPR constants into INDIRECT_REF expressions and the
	 removal of dead pointer assignments done by the early scalar
	 cleanup passes.  */
      if (TREE_ADDRESSABLE (var))
	{
	  if (!bitmap_bit_p (addressable_vars, DECL_UID (var))
	      && TREE_CODE (var) != RESULT_DECL
	      && !is_global_var (var))
	    {
	      bool okay_to_mark = true;

	      /* Since VAR is now a regular GIMPLE register, we will need
		 to rename VAR into SSA afterwards.  */
	      mark_sym_for_renaming (var);

	      /* If VAR can have sub-variables, and any of its
		 sub-variables has its address taken, then we cannot
		 remove the addressable flag from VAR.  */
	      if (var_can_have_subvars (var)
		  && (svars = get_subvars_for_var (var)))
		{
		  subvar_t sv;

		  for (sv = svars; sv; sv = sv->next)
		    {
		      if (bitmap_bit_p (addressable_vars, DECL_UID (sv->var)))
			okay_to_mark = false;
		      mark_sym_for_renaming (sv->var);
		    }
		}

	      /* The address of VAR is not needed, remove the
		 addressable bit, so that it can be optimized as a
		 regular variable.  */
	      if (okay_to_mark)
		mark_non_addressable (var);
	    }
	}

      /* Global variables and addressable locals may be aliased.  Create an
	 entry in ADDRESSABLE_VARS for VAR.  */
      if (may_be_aliased (var)
	  && (!var_can_have_subvars (var)
	      || get_subvars_for_var (var) == NULL))
	{
	  create_alias_map_for (var, ai);
	  mark_sym_for_renaming (var);
	}

      /* Add pointer variables that have been dereferenced to the POINTERS
	 array and create a symbol memory tag for them.  */
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  if ((bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var))
	       || bitmap_bit_p (ai->dereferenced_ptrs_load, DECL_UID (var))))
	    {
	      tree tag;
	      var_ann_t t_ann;

	      /* If pointer VAR still doesn't have a memory tag
		 associated with it, create it now or re-use an
		 existing one.  */
	      tag = get_tmt_for (var, ai);
	      t_ann = var_ann (tag);

	      /* The symbol tag will need to be renamed into SSA
		 afterwards.  Note that we cannot do this inside
		 get_tmt_for because aliasing may run multiple times
		 and we only create symbol tags the first time.  */
	      mark_sym_for_renaming (tag);

	      /* Similarly, if pointer VAR used to have another type
		 tag, we will need to process it in the renamer to
		 remove the stale virtual operands.  */
	      if (v_ann->symbol_mem_tag)
		mark_sym_for_renaming (v_ann->symbol_mem_tag);

	      /* Associate the tag with pointer VAR.  */
	      v_ann->symbol_mem_tag = tag;

	      /* If pointer VAR has been used in a store operation,
		 then its memory tag must be marked as written-to.  */
	      if (bitmap_bit_p (ai->dereferenced_ptrs_store, DECL_UID (var)))
		bitmap_set_bit (ai->written_vars, DECL_UID (tag));

	      /* All the dereferences of pointer VAR count as
		 references of TAG.  Since TAG can be associated with
		 several pointers, add the dereferences of VAR to the
		 TAG.  */
	      NUM_REFERENCES_SET (t_ann,
				  NUM_REFERENCES (t_ann)
				  + NUM_REFERENCES (v_ann));
	    }
	  else
	    {
	      /* The pointer has not been dereferenced.  If it had a
		 symbol memory tag, remove it and mark the old tag for
		 renaming to remove it out of the IL.  */
	      var_ann_t ann = var_ann (var);
	      tree tag = ann->symbol_mem_tag;
	      if (tag)
		{
		  mark_sym_for_renaming (tag);
		  ann->symbol_mem_tag = NULL_TREE;
		}
	    }
	}
    }

  VEC_free (tree, heap, varvec);
}


/* Determine whether to use .GLOBAL_VAR to model call clobbering semantics. At
   every call site, we need to emit V_MAY_DEF expressions to represent the
   clobbering effects of the call for variables whose address escapes the
   current function.

   One approach is to group all call-clobbered variables into a single
   representative that is used as an alias of every call-clobbered variable
   (.GLOBAL_VAR).  This works well, but it ties the optimizer hands because
   references to any call clobbered variable is a reference to .GLOBAL_VAR.

   The second approach is to emit a clobbering V_MAY_DEF for every
   call-clobbered variable at call sites.  This is the preferred way in terms
   of optimization opportunities but it may create too many V_MAY_DEF operands
   if there are many call clobbered variables and function calls in the
   function.

   To decide whether or not to use .GLOBAL_VAR we multiply the number of
   function calls found by the number of call-clobbered variables.  If that
   product is beyond a certain threshold, as determined by the parameterized
   values shown below, we use .GLOBAL_VAR.

   FIXME.  This heuristic should be improved.  One idea is to use several
   .GLOBAL_VARs of different types instead of a single one.  The thresholds
   have been derived from a typical bootstrap cycle, including all target
   libraries.
   Compile times were found to increase by ~1% compared to using
   .GLOBAL_VAR.  */

static void
maybe_create_global_var (struct alias_info *ai)
{
  unsigned i, n_clobbered;
  bitmap_iterator bi;

  /* No need to create it, if we have one already.  */
  if (global_var == NULL_TREE)
    {
      /* Count all the call-clobbered variables.  */
      n_clobbered = 0;
      EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
	{
	  n_clobbered++;
	}

      /* If the number of virtual operands that would be needed to
	 model all the call-clobbered variables is larger than
	 GLOBAL_VAR_THRESHOLD, create .GLOBAL_VAR.

	 Also create .GLOBAL_VAR if there are no call-clobbered
	 variables and the program contains a mixture of pure/const
	 and regular function calls.  This is to avoid the problem
	 described in PR 20115:

	      int X;
	      int func_pure (void) { return X; }
	      int func_non_pure (int a) { X += a; }
	      int foo ()
	      {
	 	int a = func_pure ();
		func_non_pure (a);
		a = func_pure ();
		return a;
	      }

	 Since foo() has no call-clobbered variables, there is
	 no relationship between the calls to func_pure and
	 func_non_pure.  Since func_pure has no side-effects, value
	 numbering optimizations elide the second call to func_pure.
	 So, if we have some pure/const and some regular calls in the
	 program we create .GLOBAL_VAR to avoid missing these
	 relations.  */
      if (ai->num_calls_found * n_clobbered >= (size_t) GLOBAL_VAR_THRESHOLD
	  || (n_clobbered == 0
	      && ai->num_calls_found > 0
	      && ai->num_pure_const_calls_found > 0
	      && ai->num_calls_found > ai->num_pure_const_calls_found))
	create_global_var ();
    }

  /* Mark all call-clobbered symbols for renaming.  Since the initial
     rewrite into SSA ignored all call sites, we may need to rename
     .GLOBAL_VAR and the call-clobbered variables.  */
  EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
    {
      tree var = referenced_var (i);

      /* If the function has calls to clobbering functions and
	 .GLOBAL_VAR has been created, make it an alias for all
	 call-clobbered variables.  */
      if (global_var && var != global_var)
	{
	  add_may_alias (var, global_var);
	  gcc_assert (!get_subvars_for_var (var));
	}

      mark_sym_for_renaming (var);
    }
}


/* Return TRUE if pointer PTR may point to variable VAR.

   MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR
	This is needed because when checking for type conflicts we are
	interested in the alias set of the memory location pointed-to by
	PTR.  The alias set of PTR itself is irrelevant.

   VAR_ALIAS_SET is the alias set for VAR.  */

static bool
may_alias_p (tree ptr, HOST_WIDE_INT mem_alias_set,
	     tree var, HOST_WIDE_INT var_alias_set,
	     bool alias_set_only)
{
  tree mem;

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  */
  mem = var_ann (ptr)->symbol_mem_tag;
  if (mem == var)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 2, pointer arguments may
     not point to anything else.  */
  if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 1, pointer arguments may
     not point to global variables.  */
  if (flag_argument_noalias > 1 && is_global_var (var)
      && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If either MEM or VAR is a read-only global and the other one
     isn't, then PTR cannot point to VAR.  */
  if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
      || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);

  alias_stats.tbaa_queries++;

  /* If the alias sets don't conflict then MEM cannot alias VAR.  */
  if (!alias_sets_conflict_p (mem_alias_set, var_alias_set))
    {
      alias_stats.alias_noalias++;
      alias_stats.tbaa_resolved++;
      return false;
    }

  /* If var is a record or union type, ptr cannot point into var
     unless there is some explicit address operation in the
     program that can reference a field of the ptr's dereferenced
     type.  This also assumes that the types of both var and ptr are
     contained within the compilation unit, and that there is no fancy
     addressing arithmetic associated with any of the types
     involved.  */

  if ((mem_alias_set != 0) && (var_alias_set != 0))
    {
      tree ptr_type = TREE_TYPE (ptr);
      tree var_type = TREE_TYPE (var);

      /* The star count is -1 if the type at the end of the pointer_to
	 chain is not a record or union type.  */
      if ((!alias_set_only) &&
	  ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
	{
	  int ptr_star_count = 0;

	  /* ipa_type_escape_star_count_of_interesting_type is a
	     little too restrictive for the pointer type, need to
	     allow pointers to primitive types as long as those types
	     cannot be pointers to everything.  */
	  while (POINTER_TYPE_P (ptr_type))
	    /* Strip the *'s off.  */
	    {
	      ptr_type = TREE_TYPE (ptr_type);
	      ptr_star_count++;
	    }

	  /* There does not appear to be a better test to see if the
	     pointer type was one of the pointer to everything
	     types.  */

	  if (ptr_star_count > 0)
	    {
	      alias_stats.structnoaddress_queries++;
	      if (ipa_type_escape_field_does_not_clobber_p (var_type,
							    TREE_TYPE (ptr)))
		{
		  alias_stats.structnoaddress_resolved++;
		  alias_stats.alias_noalias++;
		  return false;
		}
	    }
	  else if (ptr_star_count == 0)
	    {
	      /* If ptr_type was not really a pointer to type, it cannot
		 alias.  */
	      alias_stats.structnoaddress_queries++;
	      alias_stats.structnoaddress_resolved++;
	      alias_stats.alias_noalias++;
	      return false;
	    }
	}
    }

  alias_stats.alias_mayalias++;
  return true;
}


/* Add ALIAS to the set of variables that may alias VAR.  */

static void
add_may_alias (tree var, tree alias)
{
  size_t i;
  var_ann_t v_ann = get_var_ann (var);
  var_ann_t a_ann = get_var_ann (alias);
  tree al;

  /* Don't allow self-referential aliases.  */
  gcc_assert (var != alias);

  /* ALIAS must be addressable if it's being added to an alias set.  */
#if 1
  TREE_ADDRESSABLE (alias) = 1;
#else
  gcc_assert (may_be_aliased (alias));
#endif

  if (v_ann->may_aliases == NULL)
    v_ann->may_aliases = VEC_alloc (tree, gc, 2);

  /* Avoid adding duplicates.  */
  for (i = 0; VEC_iterate (tree, v_ann->may_aliases, i, al); i++)
    if (alias == al)
      return;

  VEC_safe_push (tree, gc, v_ann->may_aliases, alias);
  a_ann->is_aliased = 1;
}


/* Replace alias I in the alias sets of VAR with NEW_ALIAS.
 */

static void
replace_may_alias (tree var, size_t i, tree new_alias)
{
  var_ann_t v_ann = var_ann (var);
  VEC_replace (tree, v_ann->may_aliases, i, new_alias);
}


/* Mark pointer PTR as pointing to an arbitrary memory location.  */

static void
set_pt_anything (tree ptr)
{
  struct ptr_info_def *pi = get_ptr_info (ptr);

  pi->pt_anything = 1;
  pi->pt_vars = NULL;

  /* The pointer used to have a name tag, but we now found it pointing
     to an arbitrary location.  The name tag needs to be renamed and
     disassociated from PTR.  */
  if (pi->name_mem_tag)
    {
      mark_sym_for_renaming (pi->name_mem_tag);
      pi->name_mem_tag = NULL_TREE;
    }
}


/* Return true if STMT is an "escape" site from the current function.  Escape
   sites those statements which might expose the address of a variable
   outside the current function.  STMT is an escape site iff:

	1- STMT is a function call, or
	2- STMT is an __asm__ expression, or
	3- STMT is an assignment to a non-local variable, or
	4- STMT is a return statement.

   Return the type of escape site found, if we found one, or NO_ESCAPE
   if none.  */

enum escape_type
is_escape_site (tree stmt)
{
  tree call = get_call_expr_in (stmt);
  if (call != NULL_TREE)
    {
      /* Calls without side effects (pure/const) cannot store the
	 pointer anywhere observable.  */
      if (!TREE_SIDE_EFFECTS (call))
	return ESCAPE_TO_PURE_CONST;

      return ESCAPE_TO_CALL;
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    return ESCAPE_TO_ASM;
  else if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      tree lhs = TREE_OPERAND (stmt, 0);

      /* Get to the base of _REF nodes.  */
      if (TREE_CODE (lhs) != SSA_NAME)
	lhs = get_base_address (lhs);

      /* If we couldn't recognize the LHS of the assignment, assume that it
	 is a non-local store.  */
      if (lhs == NULL_TREE)
	return ESCAPE_UNKNOWN;

      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == NOP_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 1)) == CONVERT_EXPR
	  || TREE_CODE (TREE_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
	{
	  tree from = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (stmt, 1), 0));
	  tree to = TREE_TYPE (TREE_OPERAND (stmt, 1));

	  /* If the RHS is a conversion between a pointer and an integer, the
	     pointer escapes since we can't track the integer.  */
	  if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
	    return ESCAPE_BAD_CAST;

	  /* Same if the RHS is a conversion between a regular pointer and a
	     ref-all pointer since we can't track the SMT of the former.  */
	  if (POINTER_TYPE_P (from) && !TYPE_REF_CAN_ALIAS_ALL (from)
	      && POINTER_TYPE_P (to) && TYPE_REF_CAN_ALIAS_ALL (to))
	    return ESCAPE_BAD_CAST;
	}

      /* If the LHS is an SSA name, it can't possibly represent a non-local
	 memory store.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	return NO_ESCAPE;

      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
	 local variables we cannot be sure if it will escape, because we
	 don't have information about objects not in SSA form.  Need to
	 implement something along the lines of

	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
	 Conference on Object-Oriented Programming Systems, Languages, and
	 Applications (OOPSLA), pp. 1-19, 1999.  */
      return ESCAPE_STORED_IN_GLOBAL;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR)
    return ESCAPE_TO_RETURN;

  return NO_ESCAPE;
}

/* Create a new memory tag of type TYPE.
   Does NOT push it into the current binding.
*/ 2182 2183static tree 2184create_tag_raw (enum tree_code code, tree type, const char *prefix) 2185{ 2186 tree tmp_var; 2187 tree new_type; 2188 2189 /* Make the type of the variable writable. */ 2190 new_type = build_type_variant (type, 0, 0); 2191 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type); 2192 2193 tmp_var = build_decl (code, create_tmp_var_name (prefix), 2194 type); 2195 /* Make the variable writable. */ 2196 TREE_READONLY (tmp_var) = 0; 2197 2198 /* It doesn't start out global. */ 2199 MTAG_GLOBAL (tmp_var) = 0; 2200 TREE_STATIC (tmp_var) = 0; 2201 TREE_USED (tmp_var) = 1; 2202 2203 return tmp_var; 2204} 2205 2206/* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag 2207 is considered to represent all the pointers whose pointed-to types are 2208 in the same alias set class. Otherwise, the tag represents a single 2209 SSA_NAME pointer variable. */ 2210 2211static tree 2212create_memory_tag (tree type, bool is_type_tag) 2213{ 2214 var_ann_t ann; 2215 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG, 2216 type, (is_type_tag) ? "SMT" : "NMT"); 2217 2218 /* By default, memory tags are local variables. Alias analysis will 2219 determine whether they should be considered globals. */ 2220 DECL_CONTEXT (tag) = current_function_decl; 2221 2222 /* Memory tags are by definition addressable. */ 2223 TREE_ADDRESSABLE (tag) = 1; 2224 2225 ann = get_var_ann (tag); 2226 ann->symbol_mem_tag = NULL_TREE; 2227 2228 /* Add the tag to the symbol table. */ 2229 add_referenced_var (tag); 2230 2231 return tag; 2232} 2233 2234 2235/* Create a name memory tag to represent a specific SSA_NAME pointer P_i. 2236 This is used if P_i has been found to point to a specific set of 2237 variables or to a non-aliased memory location like the address returned 2238 by malloc functions. 
 */

static tree
get_nmt_for (tree ptr)
{
  struct ptr_info_def *pi = get_ptr_info (ptr);
  tree tag = pi->name_mem_tag;

  /* Create a fresh NMT whose type is PTR's pointed-to type if PTR does
     not already have one.  The caller is responsible for storing the
     tag back into PI if desired.  */
  if (tag == NULL_TREE)
    tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
  return tag;
}


/* Return the symbol memory tag associated to pointer PTR.  A memory
   tag is an artificial variable that represents the memory location
   pointed-to by PTR.  It is used to model the effects of pointer
   de-references on addressable variables.

   AI points to the data gathered during alias analysis.  This
   function populates the array AI->POINTERS.  */

static tree
get_tmt_for (tree ptr, struct alias_info *ai)
{
  size_t i;
  tree tag;
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  HOST_WIDE_INT tag_set = get_alias_set (tag_type);

  /* We use a unique memory tag for all the ref-all pointers.  */
  if (PTR_IS_REF_ALL (ptr))
    {
      if (!ai->ref_all_symbol_mem_tag)
	ai->ref_all_symbol_mem_tag = create_memory_tag (void_type_node, true);
      return ai->ref_all_symbol_mem_tag;
    }

  /* To avoid creating unnecessary memory tags, only create one memory tag
     per alias set class.  Note that it may be tempting to group
     memory tags based on conflicting alias sets instead of
     equivalence.  That would be wrong because alias sets are not
     necessarily transitive (as demonstrated by the libstdc++ test
     23_containers/vector/cons/4.cc).  Given three alias sets A, B, C
     such that conflicts (A, B) == true and conflicts (A, C) == true,
     it does not necessarily follow that conflicts (B, C) == true.  */
  for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
    {
      struct alias_map_d *curr = ai->pointers[i];
      tree curr_tag = var_ann (curr->var)->symbol_mem_tag;
      if (tag_set == curr->set)
	{
	  tag = curr_tag;
	  break;
	}
    }

  /* If VAR cannot alias with any of the existing memory tags, create a new
     tag for PTR and add it to the POINTERS array.  */
  if (tag == NULL_TREE)
    {
      struct alias_map_d *alias_map;

      /* If PTR did not have a symbol tag already, create a new SMT.*
	 artificial variable representing the memory location
	 pointed-to by PTR.  */
      if (var_ann (ptr)->symbol_mem_tag == NULL_TREE)
	tag = create_memory_tag (tag_type, true);
      else
	tag = var_ann (ptr)->symbol_mem_tag;

      /* Add PTR to the POINTERS array.  Note that we are not interested in
	 PTR's alias set.  Instead, we cache the alias set for the memory that
	 PTR points to.  */
      alias_map = XCNEW (struct alias_map_d);
      alias_map->var = ptr;
      alias_map->set = tag_set;
      ai->pointers[ai->num_pointers++] = alias_map;
    }

  /* If the pointed-to type is volatile, so is the tag.  */
  TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);

  /* Make sure that the symbol tag has the same alias set as the
     pointed-to type.  */
  gcc_assert (tag_set == get_alias_set (tag));

  return tag;
}


/* Create GLOBAL_VAR, an artificial global variable to act as a
   representative of all the variables that may be clobbered by function
   calls.
*/ 2332 2333static void 2334create_global_var (void) 2335{ 2336 global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"), 2337 void_type_node); 2338 DECL_ARTIFICIAL (global_var) = 1; 2339 TREE_READONLY (global_var) = 0; 2340 DECL_EXTERNAL (global_var) = 1; 2341 TREE_STATIC (global_var) = 1; 2342 TREE_USED (global_var) = 1; 2343 DECL_CONTEXT (global_var) = NULL_TREE; 2344 TREE_THIS_VOLATILE (global_var) = 0; 2345 TREE_ADDRESSABLE (global_var) = 0; 2346 2347 create_var_ann (global_var); 2348 mark_call_clobbered (global_var, ESCAPE_UNKNOWN); 2349 add_referenced_var (global_var); 2350 mark_sym_for_renaming (global_var); 2351} 2352 2353 2354/* Dump alias statistics on FILE. */ 2355 2356static void 2357dump_alias_stats (FILE *file) 2358{ 2359 const char *funcname 2360 = lang_hooks.decl_printable_name (current_function_decl, 2); 2361 fprintf (file, "\nAlias statistics for %s\n\n", funcname); 2362 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries); 2363 fprintf (file, "Total alias mayalias results:\t%u\n", 2364 alias_stats.alias_mayalias); 2365 fprintf (file, "Total alias noalias results:\t%u\n", 2366 alias_stats.alias_noalias); 2367 fprintf (file, "Total simple queries:\t%u\n", 2368 alias_stats.simple_queries); 2369 fprintf (file, "Total simple resolved:\t%u\n", 2370 alias_stats.simple_resolved); 2371 fprintf (file, "Total TBAA queries:\t%u\n", 2372 alias_stats.tbaa_queries); 2373 fprintf (file, "Total TBAA resolved:\t%u\n", 2374 alias_stats.tbaa_resolved); 2375 fprintf (file, "Total non-addressable structure type queries:\t%u\n", 2376 alias_stats.structnoaddress_queries); 2377 fprintf (file, "Total non-addressable structure type resolved:\t%u\n", 2378 alias_stats.structnoaddress_resolved); 2379} 2380 2381 2382/* Dump alias information on FILE. 
 */

void
dump_alias_info (FILE *file)
{
  size_t i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nDereferenced pointers\n\n");

  /* A pointer is "dereferenced" here iff it was given a symbol
     memory tag.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      var_ann_t ann = var_ann (var);
      if (ann->symbol_mem_tag)
	dump_variable (file, var);
    }

  fprintf (file, "\nSymbol memory tags\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
	dump_variable (file, var);
    }

  fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);

  fprintf (file, "SSA_NAME pointers\n\n");
  /* SSA name 0 is never used; start at 1.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE)
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (!SSA_NAME_IN_FREE_LIST (ptr)
	  && pi
	  && pi->name_mem_tag)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\nName memory tags\n\n");

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (TREE_CODE (var) == NAME_MEMORY_TAG)
	dump_variable (file, var);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}


/* Return the alias information associated with pointer T.  It creates a
   new instance if none existed.
 */

struct ptr_info_def *
get_ptr_info (tree t)
{
  struct ptr_info_def *pi;

  /* Only SSA names of pointer type carry points-to information.  */
  gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));

  pi = SSA_NAME_PTR_INFO (t);
  if (pi == NULL)
    {
      /* Lazily allocate a zero-initialized record on first use.  */
      pi = GGC_NEW (struct ptr_info_def);
      memset ((void *)pi, 0, sizeof (*pi));
      SSA_NAME_PTR_INFO (t) = pi;
    }

  return pi;
}


/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  /* PI may be NULL if no points-to information was ever computed for
     PTR; in that case only the name itself is printed.  */
  if (pi)
    {
      if (pi->name_mem_tag)
	{
	  fprintf (file, ", name memory tag: ");
	  print_generic_expr (file, pi->name_mem_tag, dump_flags);
	}

      if (pi->is_dereferenced)
	fprintf (file, ", is dereferenced");

      if (pi->value_escapes_p)
	fprintf (file, ", its value escapes");

      if (pi->pt_anything)
	fprintf (file, ", points-to anything");

      if (pi->pt_null)
	fprintf (file, ", points-to NULL");

      if (pi->pt_vars)
	{
	  unsigned ix;
	  bitmap_iterator bi;

	  fprintf (file, ", points-to vars: { ");
	  EXECUTE_IF_SET_IN_BITMAP (pi->pt_vars, 0, ix, bi)
	    {
	      print_generic_expr (file, referenced_var (ix), dump_flags);
	      fprintf (file, " ");
	    }
	  fprintf (file, "}");
	}
    }

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}


/* Dump points-to information into FILE.  NOTE: This function is slow, as
   it needs to traverse the whole CFG looking for pointer SSA_NAMEs.
 */

void
dump_points_to_info (FILE *file)
{
  basic_block bb;
  block_stmt_iterator si;
  ssa_op_iter iter;
  const char *fname =
    lang_hooks.decl_printable_name (current_function_decl, 2);
  referenced_var_iterator rvi;
  tree var;

  fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);

  /* First dump points-to information for the default definitions of
     pointer variables.  This is necessary because default definitions are
     not part of the code.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  tree def = default_def (var);
	  if (def)
	    dump_points_to_info_for (file, def);
	}
    }

  /* Dump points-to information for every pointer defined in the program.  */
  FOR_EACH_BB (bb)
    {
      tree phi;

      /* PHI results are pointer definitions too.  */
      for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
	{
	  tree ptr = PHI_RESULT (phi);
	  if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	    dump_points_to_info_for (file, ptr);
	}

      for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
	{
	  tree stmt = bsi_stmt (si);
	  tree def;
	  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
	    if (POINTER_TYPE_P (TREE_TYPE (def)))
	      dump_points_to_info_for (file, def);
	}
    }

  fprintf (file, "\n");
}


/* Dump points-to information for the current function into stderr.  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}

/* Dump to FILE the list of variables that may be aliasing VAR.
*/ 2602 2603void 2604dump_may_aliases_for (FILE *file, tree var) 2605{ 2606 VEC(tree, gc) *aliases; 2607 2608 if (TREE_CODE (var) == SSA_NAME) 2609 var = SSA_NAME_VAR (var); 2610 2611 aliases = var_ann (var)->may_aliases; 2612 if (aliases) 2613 { 2614 size_t i; 2615 tree al; 2616 fprintf (file, "{ "); 2617 for (i = 0; VEC_iterate (tree, aliases, i, al); i++) 2618 { 2619 print_generic_expr (file, al, dump_flags); 2620 fprintf (file, " "); 2621 } 2622 fprintf (file, "}"); 2623 } 2624} 2625 2626 2627/* Dump to stderr the list of variables that may be aliasing VAR. */ 2628 2629void 2630debug_may_aliases_for (tree var) 2631{ 2632 dump_may_aliases_for (stderr, var); 2633} 2634 2635/* Return true if VAR may be aliased. */ 2636 2637bool 2638may_be_aliased (tree var) 2639{ 2640 /* Obviously. */ 2641 if (TREE_ADDRESSABLE (var)) 2642 return true; 2643 2644 /* Globally visible variables can have their addresses taken by other 2645 translation units. */ 2646 2647 if (MTAG_P (var) 2648 && (MTAG_GLOBAL (var) || TREE_PUBLIC (var))) 2649 return true; 2650 else if (!MTAG_P (var) 2651 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var))) 2652 return true; 2653 2654 /* Automatic variables can't have their addresses escape any other way. 2655 This must be after the check for global variables, as extern declarations 2656 do not have TREE_STATIC set. */ 2657 if (!TREE_STATIC (var)) 2658 return false; 2659 2660 /* If we're in unit-at-a-time mode, then we must have seen all occurrences 2661 of address-of operators, and so we can trust TREE_ADDRESSABLE. Otherwise 2662 we can only be sure the variable isn't addressable if it's local to the 2663 current function. */ 2664 if (flag_unit_at_a_time) 2665 return false; 2666 if (decl_function_context (var) == current_function_decl) 2667 return false; 2668 2669 return true; 2670} 2671 2672 2673/* Given two symbols return TRUE if one is in the alias set of the other. 
*/ 2674bool 2675is_aliased_with (tree tag, tree sym) 2676{ 2677 size_t i; 2678 VEC(tree,gc) *aliases; 2679 tree al; 2680 2681 if (var_ann (sym)->is_aliased) 2682 { 2683 aliases = var_ann (tag)->may_aliases; 2684 2685 if (aliases == NULL) 2686 return false; 2687 2688 for (i = 0; VEC_iterate (tree, aliases, i, al); i++) 2689 if (al == sym) 2690 return true; 2691 } 2692 else 2693 { 2694 aliases = var_ann (sym)->may_aliases; 2695 2696 if (aliases == NULL) 2697 return false; 2698 2699 for (i = 0; VEC_iterate (tree, aliases, i, al); i++) 2700 if (al == tag) 2701 return true; 2702 } 2703 2704 return false; 2705} 2706 2707 2708/* Given two tags return TRUE if their may-alias sets intersect. */ 2709 2710bool 2711may_aliases_intersect (tree tag1, tree tag2) 2712{ 2713 struct pointer_set_t *set1 = pointer_set_create (); 2714 unsigned i; 2715 VEC(tree,gc) *may_aliases1 = may_aliases (tag1); 2716 VEC(tree,gc) *may_aliases2 = may_aliases (tag2); 2717 tree sym; 2718 2719 /* Insert all the symbols from the first may-alias set into the 2720 pointer-set. */ 2721 for (i = 0; VEC_iterate (tree, may_aliases1, i, sym); i++) 2722 pointer_set_insert (set1, sym); 2723 2724 /* Go through the second may-alias set and check if it contains symbols that 2725 are common with the first set. */ 2726 for (i = 0; VEC_iterate (tree, may_aliases2, i, sym); i++) 2727 if (pointer_set_contains (set1, sym)) 2728 { 2729 pointer_set_destroy (set1); 2730 return true; 2731 } 2732 2733 pointer_set_destroy (set1); 2734 return false; 2735} 2736 2737 2738/* The following is based on code in add_stmt_operand to ensure that the 2739 same defs/uses/vdefs/vuses will be found after replacing a reference 2740 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value 2741 is the address of var. Return a memtag for the ptr, after adding the 2742 proper may_aliases to it (which are the aliases of var, if it has any, 2743 or var itself). 
 */

static tree
add_may_alias_for_new_tag (tree tag, tree var)
{
  var_ann_t v_ann = var_ann (var);
  VEC(tree, gc) *aliases = v_ann->may_aliases;

  /* Case 1: |aliases| == 1.  If VAR's only alias is already an SMT,
     reuse that SMT directly instead of TAG.  */
  if ((aliases != NULL)
      && (VEC_length (tree, aliases) == 1))
    {
      tree ali = VEC_index (tree, aliases, 0);

      if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
	return ali;
    }

  /* Case 2: |aliases| == 0.  VAR itself becomes the alias.  */
  if (aliases == NULL)
    add_may_alias (tag, var);
  else
    {
      /* Case 3: |aliases| > 1.  TAG inherits all of VAR's aliases.  */
      unsigned i;
      tree al;

      for (i = 0; VEC_iterate (tree, aliases, i, al); i++)
	add_may_alias (tag, al);
    }

  return tag;
}

/* Create a new symbol tag for PTR.  Construct the may-alias list of this type
   tag so that it has the aliasing of VAR, or of the relevant subvars of VAR
   according to the location accessed by EXPR.

   Note, the set of aliases represented by the new symbol tag are not marked
   for renaming.  */

void
new_type_alias (tree ptr, tree var, tree expr)
{
  var_ann_t p_ann = var_ann (ptr);
  tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
  tree tag;
  subvar_t svars;
  tree ali = NULL_TREE;
  HOST_WIDE_INT offset, size, maxsize;
  tree ref;

  gcc_assert (p_ann->symbol_mem_tag == NULL_TREE);
  gcc_assert (!MTAG_P (var));

  ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
  gcc_assert (ref);

  tag = create_memory_tag (tag_type, true);
  p_ann->symbol_mem_tag = tag;

  /* Add VAR to the may-alias set of PTR's new symbol tag.  If VAR has
     subvars, add the subvars to the tag instead of the actual var.  */
  if (var_can_have_subvars (var)
      && (svars = get_subvars_for_var (var)))
    {
      subvar_t sv;
      VEC (tree, heap) *overlaps = NULL;
      unsigned int len;

      /* Collect the subvars that overlap the accessed region
	 [offset, offset + maxsize).  */
      for (sv = svars; sv; sv = sv->next)
	{
	  bool exact;

	  if (overlap_subvar (offset, maxsize, sv->var, &exact))
	    VEC_safe_push (tree, heap, overlaps, sv->var);
	}
      len = VEC_length (tree, overlaps);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "\nnumber of overlapping subvars = %u\n", len);
      gcc_assert (len);

      if (len == 1)
	ali = add_may_alias_for_new_tag (tag, VEC_index (tree, overlaps, 0));
      else if (len > 1)
	{
	  unsigned int k;
	  tree sv_var;

	  for (k = 0; VEC_iterate (tree, overlaps, k, sv_var); k++)
	    {
	      ali = add_may_alias_for_new_tag (tag, sv_var);

	      if (ali != tag)
		{
		  /* Can happen only if 'Case 1' of add_may_alias_for_new_tag
		     took place.  Since more than one svar was found, we add
		     'ali' as one of the may_aliases of the new tag.  */
		  add_may_alias (tag, ali);
		  ali = tag;
		}
	    }
	}
    }
  else
    ali = add_may_alias_for_new_tag (tag, var);

  p_ann->symbol_mem_tag = ali;
  TREE_READONLY (tag) = TREE_READONLY (var);
  MTAG_GLOBAL (tag) = is_global_var (var);
}

/* This represents the used range of a variable.  */

typedef struct used_part
{
  /* Lowest and one-past-highest used bit positions seen so far.  */
  HOST_WIDE_INT minused;
  HOST_WIDE_INT maxused;
  /* True if we have an explicit use/def of some portion of this variable,
     even if it is all of it. i.e. a.b = 5 or temp = a.b.  */
  bool explicit_uses;
  /* True if we have an implicit use/def of some portion of this
     variable.  Implicit uses occur when we can't tell what part we
     are referencing, and have to make conservative assumptions.  */
  bool implicit_uses;
  /* True if the structure is only written to or taken its address.
 */
  bool write_only;
} *used_part_t;

/* An array of used_part structures, indexed by variable uid.  */

static htab_t used_portions;

/* Hash table entry mapping a variable uid to its used_part record.  */

struct used_part_map
{
  unsigned int uid;
  used_part_t to;
};

/* Return true if the uid in the two used part maps are equal.  */

static int
used_part_map_eq (const void *va, const void *vb)
{
  const struct used_part_map *a = (const struct used_part_map *) va;
  const struct used_part_map *b = (const struct used_part_map *) vb;
  return (a->uid == b->uid);
}

/* Hash a from uid in a used_part_map.  */

static unsigned int
used_part_map_hash (const void *item)
{
  return ((const struct used_part_map *)item)->uid;
}

/* Free a used part map element.  Frees both the map entry and the
   used_part record it owns (used as the htab's del function).  */

static void
free_used_part_map (void *item)
{
  free (((struct used_part_map *)item)->to);
  free (item);
}

/* Lookup a used_part structure for a UID.  Returns NULL if the uid has
   no entry yet.  */

static used_part_t
up_lookup (unsigned int uid)
{
  struct used_part_map *h, in;
  in.uid = uid;
  h = (struct used_part_map *) htab_find_with_hash (used_portions, &in, uid);
  if (!h)
    return NULL;
  return h->to;
}

/* Insert the pair UID, TO into the used part hashtable.  */

static void
up_insert (unsigned int uid, used_part_t to)
{
  struct used_part_map *h;
  void **loc;

  h = XNEW (struct used_part_map);
  h->uid = uid;
  h->to = to;
  loc = htab_find_slot_with_hash (used_portions, h,
				  uid, INSERT);
  /* If UID was already present, free only the old map entry, NOT its
     'to' field: callers re-insert the same used_part they obtained
     from up_lookup/get_or_create_used_part_for, so (*loc)->to may be
     the very TO being inserted.  */
  if (*loc != NULL)
    free (*loc);
  *(struct used_part_map **) loc = h;
}


/* Given a variable uid, UID, get or create the entry in the used portions
   table for the variable.
 */

static used_part_t
get_or_create_used_part_for (size_t uid)
{
  used_part_t up;
  /* Note: a newly created record is NOT inserted into USED_PORTIONS
     here; the caller is expected to call up_insert after filling it
     in.  minused starts at INT_MAX so the first recorded use always
     lowers it.  */
  if ((up = up_lookup (uid)) == NULL)
    {
      up = XCNEW (struct used_part);
      up->minused = INT_MAX;
      up->maxused = 0;
      up->explicit_uses = false;
      up->implicit_uses = false;
      up->write_only = true;
    }

  return up;
}


/* Create and return a structure sub-variable for field type FIELD at
   offset OFFSET, with size SIZE, of variable VAR.  */

static tree
create_sft (tree var, tree field, unsigned HOST_WIDE_INT offset,
	    unsigned HOST_WIDE_INT size)
{
  var_ann_t ann;
  tree subvar = create_tag_raw (STRUCT_FIELD_TAG, field, "SFT");

  /* We need to copy the various flags from VAR to SUBVAR, so that
     they are is_global_var iff the original variable was.  */
  DECL_CONTEXT (subvar) = DECL_CONTEXT (var);
  MTAG_GLOBAL (subvar) = DECL_EXTERNAL (var);
  TREE_PUBLIC (subvar) = TREE_PUBLIC (var);
  TREE_STATIC (subvar) = TREE_STATIC (var);
  TREE_READONLY (subvar) = TREE_READONLY (var);
  TREE_ADDRESSABLE (subvar) = TREE_ADDRESSABLE (var);

  /* Add the new variable to REFERENCED_VARS.  */
  ann = get_var_ann (subvar);
  ann->symbol_mem_tag = NULL;
  add_referenced_var (subvar);
  SFT_PARENT_VAR (subvar) = var;
  SFT_OFFSET (subvar) = offset;
  SFT_SIZE (subvar) = size;
  return subvar;
}


/* Given an aggregate VAR, create the subvariables that represent its
   fields.
 */

static void
create_overlap_variables_for (tree var)
{
  VEC(fieldoff_s,heap) *fieldstack = NULL;
  used_part_t up;
  size_t uid = DECL_UID (var);

  /* Only variables with some recorded (non write-only) use get
     subvariables.  */
  up = up_lookup (uid);
  if (!up
      || up->write_only)
    return;

  push_fields_onto_fieldstack (TREE_TYPE (var), &fieldstack, 0, NULL);
  if (VEC_length (fieldoff_s, fieldstack) != 0)
    {
      subvar_t *subvars;
      fieldoff_s *fo;
      bool notokay = false;
      int fieldcount = 0;
      int i;
      HOST_WIDE_INT lastfooffset = -1;
      HOST_WIDE_INT lastfosize = -1;
      tree lastfotype = NULL_TREE;

      /* Not all fields have DECL_SIZE set, and those that don't, we don't
	 know their size, and thus, can't handle.
	 The same is true of fields with DECL_SIZE that is not an integer
	 constant (such as variable sized fields).
	 Fields with offsets which are not constant will have an offset < 0
	 We *could* handle fields that are constant sized arrays, but
	 currently don't.  Doing so would require some extra changes to
	 tree-ssa-operands.c.  */

      for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (!fo->size
	      || TREE_CODE (fo->size) != INTEGER_CST
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	  fieldcount++;
	}

      /* The current heuristic we use is as follows:
	 If the variable has no used portions in this function, no
	 structure vars are created for it.
	 Otherwise,
	   If the variable has less than SALIAS_MAX_IMPLICIT_FIELDS,
	   we always create structure vars for them.
	   If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	   some explicit uses, we create structure vars for them.
	   If the variable has more than SALIAS_MAX_IMPLICIT_FIELDS, and
	   no explicit uses, we do not create structure vars for them.  */

      if (fieldcount >= SALIAS_MAX_IMPLICIT_FIELDS
	  && !up->explicit_uses)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Variable ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " has no explicit uses in this function, and is > SALIAS_MAX_IMPLICIT_FIELDS, so skipping\n");
	    }
	  notokay = true;
	}

      /* Bail out, if we can't create overlap variables.  */
      if (notokay)
	{
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return;
	}

      /* Otherwise, create the variables.  */
      subvars = lookup_subvars_for_var (var);

      sort_fieldstack (fieldstack);

      /* Walk the sorted fieldstack from the last element down.  */
      for (i = VEC_length (fieldoff_s, fieldstack);
	   VEC_iterate (fieldoff_s, fieldstack, --i, fo);)
	{
	  subvar_t sv;
	  HOST_WIDE_INT fosize;
	  tree currfotype;

	  fosize = TREE_INT_CST_LOW (fo->size);
	  currfotype = fo->type;

	  /* If this field isn't in the used portion,
	     or it has the exact same offset and size as the last
	     field, skip it.  */

	  if (((fo->offset <= up->minused
		&& fo->offset + fosize <= up->minused)
	       || fo->offset >= up->maxused)
	      || (fo->offset == lastfooffset
		  && fosize == lastfosize
		  && currfotype == lastfotype))
	    continue;
	  sv = GGC_NEW (struct subvar);
	  sv->next = *subvars;
	  sv->var = create_sft (var, fo->type, fo->offset, fosize);

	  if (dump_file)
	    {
	      fprintf (dump_file, "structure field tag %s created for var %s",
		       get_name (sv->var), get_name (var));
	      fprintf (dump_file, " offset " HOST_WIDE_INT_PRINT_DEC,
		       SFT_OFFSET (sv->var));
	      fprintf (dump_file, " size " HOST_WIDE_INT_PRINT_DEC,
		       SFT_SIZE (sv->var));
	      fprintf (dump_file, "\n");
	    }

	  lastfotype = currfotype;
	  lastfooffset = fo->offset;
	  lastfosize = fosize;
	  *subvars = sv;
	}

      /* Once we have created subvars, the original is no longer call
	 clobbered on its own.  Its call clobbered status depends
	 completely on the call clobbered status of the subvars.

	 add_referenced_var in the above loop will take care of
	 marking subvars of global variables as call clobbered for us
	 to start, since they are global as well.  */
      clear_call_clobbered (var);
    }

  VEC_free (fieldoff_s, heap, fieldstack);
}


/* Find the conservative answer to the question of what portions of what
   structures are used by this statement.  We assume that if we have a
   component ref with a known size + offset, that we only need that part
   of the structure.  For unknown cases, or cases where we do something
   to the whole structure, we assume we need to create fields for the
   entire structure.  */

static tree
find_used_portions (tree *tp, int *walk_subtrees, void *lhs_p)
{
  switch (TREE_CODE (*tp))
    {
    case MODIFY_EXPR:
      /* Recurse manually here to track whether the use is in the
	 LHS of an assignment.
*/ 3147 find_used_portions (&TREE_OPERAND (*tp, 0), walk_subtrees, tp); 3148 return find_used_portions (&TREE_OPERAND (*tp, 1), walk_subtrees, NULL); 3149 case REALPART_EXPR: 3150 case IMAGPART_EXPR: 3151 case COMPONENT_REF: 3152 case ARRAY_REF: 3153 { 3154 HOST_WIDE_INT bitsize; 3155 HOST_WIDE_INT bitmaxsize; 3156 HOST_WIDE_INT bitpos; 3157 tree ref; 3158 ref = get_ref_base_and_extent (*tp, &bitpos, &bitsize, &bitmaxsize); 3159 if (DECL_P (ref) 3160 && var_can_have_subvars (ref) 3161 && bitmaxsize != -1) 3162 { 3163 size_t uid = DECL_UID (ref); 3164 used_part_t up; 3165 3166 up = get_or_create_used_part_for (uid); 3167 3168 if (bitpos <= up->minused) 3169 up->minused = bitpos; 3170 if ((bitpos + bitmaxsize >= up->maxused)) 3171 up->maxused = bitpos + bitmaxsize; 3172 3173 if (bitsize == bitmaxsize) 3174 up->explicit_uses = true; 3175 else 3176 up->implicit_uses = true; 3177 if (!lhs_p) 3178 up->write_only = false; 3179 up_insert (uid, up); 3180 3181 *walk_subtrees = 0; 3182 return NULL_TREE; 3183 } 3184 } 3185 break; 3186 /* This is here to make sure we mark the entire base variable as used 3187 when you take its address. Because our used portion analysis is 3188 simple, we aren't looking at casts or pointer arithmetic to see what 3189 happens when you take the address. 
*/ 3190 case ADDR_EXPR: 3191 { 3192 tree var = get_base_address (TREE_OPERAND (*tp, 0)); 3193 3194 if (var 3195 && DECL_P (var) 3196 && DECL_SIZE (var) 3197 && var_can_have_subvars (var) 3198 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST) 3199 { 3200 used_part_t up; 3201 size_t uid = DECL_UID (var); 3202 3203 up = get_or_create_used_part_for (uid); 3204 3205 up->minused = 0; 3206 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var)); 3207 up->implicit_uses = true; 3208 if (!lhs_p) 3209 up->write_only = false; 3210 3211 up_insert (uid, up); 3212 *walk_subtrees = 0; 3213 return NULL_TREE; 3214 } 3215 } 3216 break; 3217 case CALL_EXPR: 3218 { 3219 tree *arg; 3220 for (arg = &TREE_OPERAND (*tp, 1); *arg; arg = &TREE_CHAIN (*arg)) 3221 { 3222 if (TREE_CODE (TREE_VALUE (*arg)) != ADDR_EXPR) 3223 find_used_portions (&TREE_VALUE (*arg), walk_subtrees, NULL); 3224 } 3225 *walk_subtrees = 0; 3226 return NULL_TREE; 3227 } 3228 case VAR_DECL: 3229 case PARM_DECL: 3230 case RESULT_DECL: 3231 { 3232 tree var = *tp; 3233 if (DECL_SIZE (var) 3234 && var_can_have_subvars (var) 3235 && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST) 3236 { 3237 used_part_t up; 3238 size_t uid = DECL_UID (var); 3239 3240 up = get_or_create_used_part_for (uid); 3241 3242 up->minused = 0; 3243 up->maxused = TREE_INT_CST_LOW (DECL_SIZE (var)); 3244 up->implicit_uses = true; 3245 3246 up_insert (uid, up); 3247 *walk_subtrees = 0; 3248 return NULL_TREE; 3249 } 3250 } 3251 break; 3252 3253 default: 3254 break; 3255 3256 } 3257 return NULL_TREE; 3258} 3259 3260/* Create structure field variables for structures used in this function. 
*/

/* Pass execute function.  First gathers, per variable, the portions
   used by any statement in the function (via find_used_portions), then
   creates overlap variables for every qualifying aggregate.  Returns 0
   (no additional TODO flags).  */

static unsigned int
create_structure_vars (void)
{
  basic_block bb;
  safe_referenced_var_iterator rvi;
  VEC (tree, heap) *varvec = NULL;
  tree var;

  /* Map from DECL_UID to the used portion of each variable, filled in
     by walking every statement of every basic block.  */
  used_portions = htab_create (10, used_part_map_hash, used_part_map_eq,
			       free_used_part_map);

  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  walk_tree_without_duplicates (bsi_stmt_ptr (bsi),
					find_used_portions,
					NULL);
	}
    }

  /* Create overlap variables for each variable that can have subvars
     and whose size is a compile-time constant.  */
  FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, rvi)
    {
      /* The C++ FE creates vars without DECL_SIZE set, for some reason.  */
      if (var
	  && DECL_SIZE (var)
	  && var_can_have_subvars (var)
	  && !MTAG_P (var)
	  && TREE_CODE (DECL_SIZE (var)) == INTEGER_CST)
	create_overlap_variables_for (var);
    }
  htab_delete (used_portions);
  VEC_free (tree, heap, varvec);
  return 0;
}

/* Gate for the structure alias pass: run only when flag_tree_salias
   (-ftree-salias) is enabled.  */

static bool
gate_structure_vars (void)
{
  return flag_tree_salias != 0;
}

struct tree_opt_pass pass_create_structure_vars =
{
  "salias",		 /* name */
  gate_structure_vars,	 /* gate */
  create_structure_vars, /* execute */
  NULL,			 /* sub */
  NULL,			 /* next */
  0,			 /* static_pass_number */
  0,			 /* tv_id */
  PROP_cfg,		 /* properties_required */
  0,			 /* properties_provided */
  0,			 /* properties_destroyed */
  0,			 /* todo_flags_start */
  TODO_dump_func,	 /* todo_flags_finish */
  0			 /* letter */
};

/* Reset the DECL_CALL_CLOBBERED flags on our referenced vars.  In
   theory, this only needs to be done for globals.
*/ 3323 3324static unsigned int 3325reset_cc_flags (void) 3326{ 3327 tree var; 3328 referenced_var_iterator rvi; 3329 3330 FOR_EACH_REFERENCED_VAR (var, rvi) 3331 DECL_CALL_CLOBBERED (var) = false; 3332 return 0; 3333} 3334 3335struct tree_opt_pass pass_reset_cc_flags = 3336{ 3337 NULL, /* name */ 3338 NULL, /* gate */ 3339 reset_cc_flags, /* execute */ 3340 NULL, /* sub */ 3341 NULL, /* next */ 3342 0, /* static_pass_number */ 3343 0, /* tv_id */ 3344 PROP_referenced_vars |PROP_cfg, /* properties_required */ 3345 0, /* properties_provided */ 3346 0, /* properties_destroyed */ 3347 0, /* todo_flags_start */ 3348 0, /* todo_flags_finish */ 3349 0 /* letter */ 3350}; 3351