1/* A pass for lowering trees to RTL. 2 Copyright (C) 2004, 2005 Free Software Foundation, Inc. 3 4This file is part of GCC. 5 6GCC is free software; you can redistribute it and/or modify 7it under the terms of the GNU General Public License as published by 8the Free Software Foundation; either version 2, or (at your option) 9any later version. 10 11GCC is distributed in the hope that it will be useful, 12but WITHOUT ANY WARRANTY; without even the implied warranty of 13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14GNU General Public License for more details. 15 16You should have received a copy of the GNU General Public License 17along with GCC; see the file COPYING. If not, write to 18the Free Software Foundation, 51 Franklin Street, Fifth Floor, 19Boston, MA 02110-1301, USA. */ 20 21#include "config.h" 22#include "system.h" 23#include "coretypes.h" 24#include "tm.h" 25#include "tree.h" 26#include "rtl.h" 27#include "tm_p.h" 28#include "basic-block.h" 29#include "function.h" 30#include "expr.h" 31#include "langhooks.h" 32#include "tree-flow.h" 33#include "timevar.h" 34#include "tree-dump.h" 35#include "tree-pass.h" 36#include "except.h" 37#include "flags.h" 38#include "diagnostic.h" 39#include "toplev.h" 40#include "debug.h" 41#include "params.h" 42 43/* Verify that there is exactly single jump instruction since last and attach 44 REG_BR_PROB note specifying probability. 45 ??? We really ought to pass the probability down to RTL expanders and let it 46 re-distribute it when the conditional expands into multiple conditionals. 47 This is however difficult to do. */ 48static void 49add_reg_br_prob_note (rtx last, int probability) 50{ 51 if (profile_status == PROFILE_ABSENT) 52 return; 53 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last)) 54 if (JUMP_P (last)) 55 { 56 /* It is common to emit condjump-around-jump sequence when we don't know 57 how to reverse the conditional. Special case this. */ 58 if (!any_condjump_p (last) 59 || !JUMP_P (NEXT_INSN (last)) 60 || !simplejump_p (NEXT_INSN (last)) 61 || !NEXT_INSN (NEXT_INSN (last)) 62 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last))) 63 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last))) 64 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))) 65 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))) 66 goto failed; 67 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); 68 REG_NOTES (last) 69 = gen_rtx_EXPR_LIST (REG_BR_PROB, 70 GEN_INT (REG_BR_PROB_BASE - probability), 71 REG_NOTES (last)); 72 return; 73 } 74 if (!last || !JUMP_P (last) || !any_condjump_p (last)) 75 goto failed; 76 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0)); 77 REG_NOTES (last) 78 = gen_rtx_EXPR_LIST (REG_BR_PROB, 79 GEN_INT (probability), REG_NOTES (last)); 80 return; 81failed: 82 if (dump_file) 83 fprintf (dump_file, "Failed to add probability note\n"); 84} 85 86 87#ifndef LOCAL_ALIGNMENT 88#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT 89#endif 90 91#ifndef STACK_ALIGNMENT_NEEDED 92#define STACK_ALIGNMENT_NEEDED 1 93#endif 94 95 96/* This structure holds data relevant to one variable that will be 97 placed in a stack slot. */ 98struct stack_var 99{ 100 /* The Variable. */ 101 tree decl; 102 103 /* The offset of the variable. During partitioning, this is the 104 offset relative to the partition. After partitioning, this 105 is relative to the stack frame. */ 106 HOST_WIDE_INT offset; 107 108 /* Initially, the size of the variable. Later, the size of the partition, 109 if this variable becomes it's partition's representative. */ 110 HOST_WIDE_INT size; 111 112 /* The *byte* alignment required for this variable. Or as, with the 113 size, the alignment for this partition. */ 114 unsigned int alignb; 115 116 /* The partition representative. */ 117 size_t representative; 118 119 /* The next stack variable in the partition, or EOC. */ 120 size_t next; 121}; 122 123#define EOC ((size_t)-1) 124 125/* We have an array of such objects while deciding allocation. */ 126static struct stack_var *stack_vars; 127static size_t stack_vars_alloc; 128static size_t stack_vars_num; 129 130/* An array of indicies such that stack_vars[stack_vars_sorted[i]].size 131 is non-decreasing. */ 132static size_t *stack_vars_sorted; 133 134/* We have an interference graph between such objects. This graph 135 is lower triangular. */ 136static bool *stack_vars_conflict; 137static size_t stack_vars_conflict_alloc; 138 139/* The phase of the stack frame. This is the known misalignment of 140 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is, 141 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */ 142static int frame_phase; 143 144/* Used during expand_used_vars to remember if we saw any decls for 145 which we'd like to enable stack smashing protection. */ 146static bool has_protected_decls; 147 148/* Used during expand_used_vars. Remember if we say a character buffer 149 smaller than our cutoff threshold. Used for -Wstack-protector. */ 150static bool has_short_buffer; 151 152/* Discover the byte alignment to use for DECL. Ignore alignment 153 we can't do with expected alignment of the stack boundary. */ 154 155static unsigned int 156get_decl_align_unit (tree decl) 157{ 158 unsigned int align; 159 160 align = DECL_ALIGN (decl); 161 align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align); 162 if (align > PREFERRED_STACK_BOUNDARY) 163 align = PREFERRED_STACK_BOUNDARY; 164 if (cfun->stack_alignment_needed < align) 165 cfun->stack_alignment_needed = align; 166 167 return align / BITS_PER_UNIT; 168} 169 170/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame. 171 Return the frame offset. */ 172 173static HOST_WIDE_INT 174alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align) 175{ 176 HOST_WIDE_INT offset, new_frame_offset; 177 178 new_frame_offset = frame_offset; 179 if (FRAME_GROWS_DOWNWARD) 180 { 181 new_frame_offset -= size + frame_phase; 182 new_frame_offset &= -align; 183 new_frame_offset += frame_phase; 184 offset = new_frame_offset; 185 } 186 else 187 { 188 new_frame_offset -= frame_phase; 189 new_frame_offset += align - 1; 190 new_frame_offset &= -align; 191 new_frame_offset += frame_phase; 192 offset = new_frame_offset; 193 new_frame_offset += size; 194 } 195 frame_offset = new_frame_offset; 196 197 if (frame_offset_overflow (frame_offset, cfun->decl)) 198 frame_offset = offset = 0; 199 200 return offset; 201} 202 203/* Accumulate DECL into STACK_VARS. */ 204 205static void 206add_stack_var (tree decl) 207{ 208 if (stack_vars_num >= stack_vars_alloc) 209 { 210 if (stack_vars_alloc) 211 stack_vars_alloc = stack_vars_alloc * 3 / 2; 212 else 213 stack_vars_alloc = 32; 214 stack_vars 215 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc); 216 } 217 stack_vars[stack_vars_num].decl = decl; 218 stack_vars[stack_vars_num].offset = 0; 219 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1); 220 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl); 221 222 /* All variables are initially in their own partition. */ 223 stack_vars[stack_vars_num].representative = stack_vars_num; 224 stack_vars[stack_vars_num].next = EOC; 225 226 /* Ensure that this decl doesn't get put onto the list twice. */ 227 SET_DECL_RTL (decl, pc_rtx); 228 229 stack_vars_num++; 230} 231 232/* Compute the linear index of a lower-triangular coordinate (I, J). */ 233 234static size_t 235triangular_index (size_t i, size_t j) 236{ 237 if (i < j) 238 { 239 size_t t; 240 t = i, i = j, j = t; 241 } 242 return (i * (i + 1)) / 2 + j; 243} 244 245/* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */ 246 247static void 248resize_stack_vars_conflict (size_t n) 249{ 250 size_t size = triangular_index (n-1, n-1) + 1; 251 252 if (size <= stack_vars_conflict_alloc) 253 return; 254 255 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size); 256 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0, 257 (size - stack_vars_conflict_alloc) * sizeof (bool)); 258 stack_vars_conflict_alloc = size; 259} 260 261/* Make the decls associated with luid's X and Y conflict. */ 262 263static void 264add_stack_var_conflict (size_t x, size_t y) 265{ 266 size_t index = triangular_index (x, y); 267 gcc_assert (index < stack_vars_conflict_alloc); 268 stack_vars_conflict[index] = true; 269} 270 271/* Check whether the decls associated with luid's X and Y conflict. */ 272 273static bool 274stack_var_conflict_p (size_t x, size_t y) 275{ 276 size_t index = triangular_index (x, y); 277 gcc_assert (index < stack_vars_conflict_alloc); 278 return stack_vars_conflict[index]; 279} 280 281/* Returns true if TYPE is or contains a union type. */ 282 283static bool 284aggregate_contains_union_type (tree type) 285{ 286 tree field; 287 288 if (TREE_CODE (type) == UNION_TYPE 289 || TREE_CODE (type) == QUAL_UNION_TYPE) 290 return true; 291 if (TREE_CODE (type) == ARRAY_TYPE) 292 return aggregate_contains_union_type (TREE_TYPE (type)); 293 if (TREE_CODE (type) != RECORD_TYPE) 294 return false; 295 296 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field)) 297 if (TREE_CODE (field) == FIELD_DECL) 298 if (aggregate_contains_union_type (TREE_TYPE (field))) 299 return true; 300 301 return false; 302} 303 304/* A subroutine of expand_used_vars. If two variables X and Y have alias 305 sets that do not conflict, then do add a conflict for these variables 306 in the interference graph. We also need to make sure to add conflicts 307 for union containing structures. Else RTL alias analysis comes along 308 and due to type based aliasing rules decides that for two overlapping 309 union temporaries { short s; int i; } accesses to the same mem through 310 different types may not alias and happily reorders stores across 311 life-time boundaries of the temporaries (See PR25654). 312 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */ 313 314static void 315add_alias_set_conflicts (void) 316{ 317 size_t i, j, n = stack_vars_num; 318 319 for (i = 0; i < n; ++i) 320 { 321 tree type_i = TREE_TYPE (stack_vars[i].decl); 322 bool aggr_i = AGGREGATE_TYPE_P (type_i); 323 bool contains_union; 324 325 contains_union = aggregate_contains_union_type (type_i); 326 for (j = 0; j < i; ++j) 327 { 328 tree type_j = TREE_TYPE (stack_vars[j].decl); 329 bool aggr_j = AGGREGATE_TYPE_P (type_j); 330 if (aggr_i != aggr_j 331 /* Either the objects conflict by means of type based 332 aliasing rules, or we need to add a conflict. */ 333 || !objects_must_conflict_p (type_i, type_j) 334 /* In case the types do not conflict ensure that access 335 to elements will conflict. In case of unions we have 336 to be careful as type based aliasing rules may say 337 access to the same memory does not conflict. So play 338 safe and add a conflict in this case. */ 339 || contains_union) 340 add_stack_var_conflict (i, j); 341 } 342 } 343} 344 345/* A subroutine of partition_stack_vars. A comparison function for qsort, 346 sorting an array of indicies by the size of the object. */ 347 348static int 349stack_var_size_cmp (const void *a, const void *b) 350{ 351 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size; 352 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size; 353 unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl); 354 unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl); 355 356 if (sa < sb) 357 return -1; 358 if (sa > sb) 359 return 1; 360 /* For stack variables of the same size use the uid of the decl 361 to make the sort stable. */ 362 if (uida < uidb) 363 return -1; 364 if (uida > uidb) 365 return 1; 366 return 0; 367} 368 369/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND 370 partitioning algorithm. Partitions A and B are known to be non-conflicting. 371 Merge them into a single partition A. 372 373 At the same time, add OFFSET to all variables in partition B. At the end 374 of the partitioning process we've have a nice block easy to lay out within 375 the stack frame. */ 376 377static void 378union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset) 379{ 380 size_t i, last; 381 382 /* Update each element of partition B with the given offset, 383 and merge them into partition A. */ 384 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next) 385 { 386 stack_vars[i].offset += offset; 387 stack_vars[i].representative = a; 388 } 389 stack_vars[last].next = stack_vars[a].next; 390 stack_vars[a].next = b; 391 392 /* Update the required alignment of partition A to account for B. */ 393 if (stack_vars[a].alignb < stack_vars[b].alignb) 394 stack_vars[a].alignb = stack_vars[b].alignb; 395 396 /* Update the interference graph and merge the conflicts. */ 397 for (last = stack_vars_num, i = 0; i < last; ++i) 398 if (stack_var_conflict_p (b, i)) 399 add_stack_var_conflict (a, i); 400} 401 402/* A subroutine of expand_used_vars. Binpack the variables into 403 partitions constrained by the interference graph. The overall 404 algorithm used is as follows: 405 406 Sort the objects by size. 407 For each object A { 408 S = size(A) 409 O = 0 410 loop { 411 Look for the largest non-conflicting object B with size <= S. 412 UNION (A, B) 413 offset(B) = O 414 O += size(B) 415 S -= size(B) 416 } 417 } 418*/ 419 420static void 421partition_stack_vars (void) 422{ 423 size_t si, sj, n = stack_vars_num; 424 425 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num); 426 for (si = 0; si < n; ++si) 427 stack_vars_sorted[si] = si; 428 429 if (n == 1) 430 return; 431 432 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp); 433 434 /* Special case: detect when all variables conflict, and thus we can't 435 do anything during the partitioning loop. It isn't uncommon (with 436 C code at least) to declare all variables at the top of the function, 437 and if we're not inlining, then all variables will be in the same scope. 438 Take advantage of very fast libc routines for this scan. */ 439 gcc_assert (sizeof(bool) == sizeof(char)); 440 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL) 441 return; 442 443 for (si = 0; si < n; ++si) 444 { 445 size_t i = stack_vars_sorted[si]; 446 HOST_WIDE_INT isize = stack_vars[i].size; 447 HOST_WIDE_INT offset = 0; 448 449 for (sj = si; sj-- > 0; ) 450 { 451 size_t j = stack_vars_sorted[sj]; 452 HOST_WIDE_INT jsize = stack_vars[j].size; 453 unsigned int jalign = stack_vars[j].alignb; 454 455 /* Ignore objects that aren't partition representatives. */ 456 if (stack_vars[j].representative != j) 457 continue; 458 459 /* Ignore objects too large for the remaining space. */ 460 if (isize < jsize) 461 continue; 462 463 /* Ignore conflicting objects. */ 464 if (stack_var_conflict_p (i, j)) 465 continue; 466 467 /* Refine the remaining space check to include alignment. */ 468 if (offset & (jalign - 1)) 469 { 470 HOST_WIDE_INT toff = offset; 471 toff += jalign - 1; 472 toff &= -(HOST_WIDE_INT)jalign; 473 if (isize - (toff - offset) < jsize) 474 continue; 475 476 isize -= toff - offset; 477 offset = toff; 478 } 479 480 /* UNION the objects, placing J at OFFSET. */ 481 union_stack_vars (i, j, offset); 482 483 isize -= jsize; 484 if (isize == 0) 485 break; 486 } 487 } 488} 489 490/* A debugging aid for expand_used_vars. Dump the generated partitions. */ 491 492static void 493dump_stack_var_partition (void) 494{ 495 size_t si, i, j, n = stack_vars_num; 496 497 for (si = 0; si < n; ++si) 498 { 499 i = stack_vars_sorted[si]; 500 501 /* Skip variables that aren't partition representatives, for now. */ 502 if (stack_vars[i].representative != i) 503 continue; 504 505 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC 506 " align %u\n", (unsigned long) i, stack_vars[i].size, 507 stack_vars[i].alignb); 508 509 for (j = i; j != EOC; j = stack_vars[j].next) 510 { 511 fputc ('\t', dump_file); 512 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags); 513 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n", 514 stack_vars[i].offset); 515 } 516 } 517} 518 519/* Assign rtl to DECL at frame offset OFFSET. */ 520 521static void 522expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset) 523{ 524 HOST_WIDE_INT align; 525 rtx x; 526 527 /* If this fails, we've overflowed the stack frame. Error nicely? */ 528 gcc_assert (offset == trunc_int_for_mode (offset, Pmode)); 529 530 x = plus_constant (virtual_stack_vars_rtx, offset); 531 x = gen_rtx_MEM (DECL_MODE (decl), x); 532 533 /* Set alignment we actually gave this decl. */ 534 offset -= frame_phase; 535 align = offset & -offset; 536 align *= BITS_PER_UNIT; 537 if (align > STACK_BOUNDARY || align == 0) 538 align = STACK_BOUNDARY; 539 DECL_ALIGN (decl) = align; 540 DECL_USER_ALIGN (decl) = 0; 541 542 set_mem_attributes (x, decl, true); 543 SET_DECL_RTL (decl, x); 544} 545 546/* A subroutine of expand_used_vars. Give each partition representative 547 a unique location within the stack frame. Update each partition member 548 with that location. */ 549 550static void 551expand_stack_vars (bool (*pred) (tree)) 552{ 553 size_t si, i, j, n = stack_vars_num; 554 555 for (si = 0; si < n; ++si) 556 { 557 HOST_WIDE_INT offset; 558 559 i = stack_vars_sorted[si]; 560 561 /* Skip variables that aren't partition representatives, for now. */ 562 if (stack_vars[i].representative != i) 563 continue; 564 565 /* Skip variables that have already had rtl assigned. See also 566 add_stack_var where we perpetrate this pc_rtx hack. */ 567 if (DECL_RTL (stack_vars[i].decl) != pc_rtx) 568 continue; 569 570 /* Check the predicate to see whether this variable should be 571 allocated in this pass. */ 572 if (pred && !pred (stack_vars[i].decl)) 573 continue; 574 575 offset = alloc_stack_frame_space (stack_vars[i].size, 576 stack_vars[i].alignb); 577 578 /* Create rtl for each variable based on their location within the 579 partition. */ 580 for (j = i; j != EOC; j = stack_vars[j].next) 581 expand_one_stack_var_at (stack_vars[j].decl, 582 stack_vars[j].offset + offset); 583 } 584} 585 586/* A subroutine of expand_one_var. Called to immediately assign rtl 587 to a variable to be allocated in the stack frame. */ 588 589static void 590expand_one_stack_var (tree var) 591{ 592 HOST_WIDE_INT size, offset, align; 593 594 size = tree_low_cst (DECL_SIZE_UNIT (var), 1); 595 align = get_decl_align_unit (var); 596 offset = alloc_stack_frame_space (size, align); 597 598 expand_one_stack_var_at (var, offset); 599} 600 601/* A subroutine of expand_one_var. Called to assign rtl 602 to a TREE_STATIC VAR_DECL. */ 603 604static void 605expand_one_static_var (tree var) 606{ 607 /* In unit-at-a-time all the static variables are expanded at the end 608 of compilation process. */ 609 if (flag_unit_at_a_time) 610 return; 611 /* If this is an inlined copy of a static local variable, 612 look up the original. */ 613 var = DECL_ORIGIN (var); 614 615 /* If we've already processed this variable because of that, do nothing. */ 616 if (TREE_ASM_WRITTEN (var)) 617 return; 618 619 /* Give the front end a chance to do whatever. In practice, this is 620 resolving duplicate names for IMA in C. */ 621 if (lang_hooks.expand_decl (var)) 622 return; 623 624 /* Otherwise, just emit the variable. */ 625 rest_of_decl_compilation (var, 0, 0); 626} 627 628/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL 629 that will reside in a hard register. */ 630 631static void 632expand_one_hard_reg_var (tree var) 633{ 634 rest_of_decl_compilation (var, 0, 0); 635} 636 637/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL 638 that will reside in a pseudo register. */ 639 640static void 641expand_one_register_var (tree var) 642{ 643 tree type = TREE_TYPE (var); 644 int unsignedp = TYPE_UNSIGNED (type); 645 enum machine_mode reg_mode 646 = promote_mode (type, DECL_MODE (var), &unsignedp, 0); 647 rtx x = gen_reg_rtx (reg_mode); 648 649 SET_DECL_RTL (var, x); 650 651 /* Note if the object is a user variable. */ 652 if (!DECL_ARTIFICIAL (var)) 653 { 654 mark_user_reg (x); 655 656 /* Trust user variables which have a pointer type to really 657 be pointers. Do not trust compiler generated temporaries 658 as our type system is totally busted as it relates to 659 pointer arithmetic which translates into lots of compiler 660 generated objects with pointer types, but which are not really 661 pointers. */ 662 if (POINTER_TYPE_P (type)) 663 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var)))); 664 } 665} 666 667/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that 668 has some associated error, e.g. its type is error-mark. We just need 669 to pick something that won't crash the rest of the compiler. */ 670 671static void 672expand_one_error_var (tree var) 673{ 674 enum machine_mode mode = DECL_MODE (var); 675 rtx x; 676 677 if (mode == BLKmode) 678 x = gen_rtx_MEM (BLKmode, const0_rtx); 679 else if (mode == VOIDmode) 680 x = const0_rtx; 681 else 682 x = gen_reg_rtx (mode); 683 684 SET_DECL_RTL (var, x); 685} 686 687/* A subroutine of expand_one_var. VAR is a variable that will be 688 allocated to the local stack frame. Return true if we wish to 689 add VAR to STACK_VARS so that it will be coalesced with other 690 variables. Return false to allocate VAR immediately. 691 692 This function is used to reduce the number of variables considered 693 for coalescing, which reduces the size of the quadratic problem. */ 694 695static bool 696defer_stack_allocation (tree var, bool toplevel) 697{ 698 /* If stack protection is enabled, *all* stack variables must be deferred, 699 so that we can re-order the strings to the top of the frame. */ 700 if (flag_stack_protect) 701 return true; 702 703 /* Variables in the outermost scope automatically conflict with 704 every other variable. The only reason to want to defer them 705 at all is that, after sorting, we can more efficiently pack 706 small variables in the stack frame. Continue to defer at -O2. */ 707 if (toplevel && optimize < 2) 708 return false; 709 710 /* Without optimization, *most* variables are allocated from the 711 stack, which makes the quadratic problem large exactly when we 712 want compilation to proceed as quickly as possible. On the 713 other hand, we don't want the function's stack frame size to 714 get completely out of hand. So we avoid adding scalars and 715 "small" aggregates to the list at all. */ 716 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32) 717 return false; 718 719 return true; 720} 721 722/* A subroutine of expand_used_vars. Expand one variable according to 723 its flavor. Variables to be placed on the stack are not actually 724 expanded yet, merely recorded. */ 725 726static void 727expand_one_var (tree var, bool toplevel) 728{ 729 if (TREE_CODE (var) != VAR_DECL) 730 lang_hooks.expand_decl (var); 731 else if (DECL_EXTERNAL (var)) 732 ; 733 else if (DECL_HAS_VALUE_EXPR_P (var)) 734 ; 735 else if (TREE_STATIC (var)) 736 expand_one_static_var (var); 737 else if (DECL_RTL_SET_P (var)) 738 ; 739 else if (TREE_TYPE (var) == error_mark_node) 740 expand_one_error_var (var); 741 else if (DECL_HARD_REGISTER (var)) 742 expand_one_hard_reg_var (var); 743 else if (use_register_for_decl (var)) 744 expand_one_register_var (var); 745 else if (defer_stack_allocation (var, toplevel)) 746 add_stack_var (var); 747 else 748 expand_one_stack_var (var); 749} 750 751/* A subroutine of expand_used_vars. Walk down through the BLOCK tree 752 expanding variables. Those variables that can be put into registers 753 are allocated pseudos; those that can't are put on the stack. 754 755 TOPLEVEL is true if this is the outermost BLOCK. */ 756 757static void 758expand_used_vars_for_block (tree block, bool toplevel) 759{ 760 size_t i, j, old_sv_num, this_sv_num, new_sv_num; 761 tree t; 762 763 old_sv_num = toplevel ? 0 : stack_vars_num; 764 765 /* Expand all variables at this level. */ 766 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) 767 if (TREE_USED (t) 768 /* Force local static variables to be output when marked by 769 used attribute. For unit-at-a-time, cgraph code already takes 770 care of this. */ 771 || (!flag_unit_at_a_time && TREE_STATIC (t) 772 && DECL_PRESERVE_P (t))) 773 expand_one_var (t, toplevel); 774 775 this_sv_num = stack_vars_num; 776 777 /* Expand all variables at containing levels. */ 778 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) 779 expand_used_vars_for_block (t, false); 780 781 /* Since we do not track exact variable lifetimes (which is not even 782 possible for variables whose address escapes), we mirror the block 783 tree in the interference graph. Here we cause all variables at this 784 level, and all sublevels, to conflict. Do make certain that a 785 variable conflicts with itself. */ 786 if (old_sv_num < this_sv_num) 787 { 788 new_sv_num = stack_vars_num; 789 resize_stack_vars_conflict (new_sv_num); 790 791 for (i = old_sv_num; i < new_sv_num; ++i) 792 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;) 793 add_stack_var_conflict (i, j); 794 } 795} 796 797/* A subroutine of expand_used_vars. Walk down through the BLOCK tree 798 and clear TREE_USED on all local variables. */ 799 800static void 801clear_tree_used (tree block) 802{ 803 tree t; 804 805 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t)) 806 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */ 807 TREE_USED (t) = 0; 808 809 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t)) 810 clear_tree_used (t); 811} 812 813enum { 814 SPCT_FLAG_DEFAULT = 1, 815 SPCT_FLAG_ALL = 2, 816 SPCT_FLAG_STRONG = 3 817}; 818 819/* Examine TYPE and determine a bit mask of the following features. */ 820 821#define SPCT_HAS_LARGE_CHAR_ARRAY 1 822#define SPCT_HAS_SMALL_CHAR_ARRAY 2 823#define SPCT_HAS_ARRAY 4 824#define SPCT_HAS_AGGREGATE 8 825 826static unsigned int 827stack_protect_classify_type (tree type) 828{ 829 unsigned int ret = 0; 830 tree t; 831 832 switch (TREE_CODE (type)) 833 { 834 case ARRAY_TYPE: 835 t = TYPE_MAIN_VARIANT (TREE_TYPE (type)); 836 if (t == char_type_node 837 || t == signed_char_type_node 838 || t == unsigned_char_type_node) 839 { 840 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE); 841 unsigned HOST_WIDE_INT len; 842 843 if (!TYPE_SIZE_UNIT (type) 844 || !host_integerp (TYPE_SIZE_UNIT (type), 1)) 845 len = max; 846 else 847 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1); 848 849 if (len < max) 850 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY; 851 else 852 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY; 853 } 854 else 855 ret = SPCT_HAS_ARRAY; 856 break; 857 858 case UNION_TYPE: 859 case QUAL_UNION_TYPE: 860 case RECORD_TYPE: 861 ret = SPCT_HAS_AGGREGATE; 862 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t)) 863 if (TREE_CODE (t) == FIELD_DECL) 864 ret |= stack_protect_classify_type (TREE_TYPE (t)); 865 break; 866 867 default: 868 break; 869 } 870 871 return ret; 872} 873 874/* Return nonzero if DECL should be segregated into the "vulnerable" upper 875 part of the local stack frame. Remember if we ever return nonzero for 876 any variable in this function. The return value is the phase number in 877 which the variable should be allocated. */ 878 879static int 880stack_protect_decl_phase (tree decl) 881{ 882 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl)); 883 int ret = 0; 884 885 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY) 886 has_short_buffer = true; 887 888 if (flag_stack_protect == SPCT_FLAG_ALL 889 || flag_stack_protect == SPCT_FLAG_STRONG) 890 { 891 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY)) 892 && !(bits & SPCT_HAS_AGGREGATE)) 893 ret = 1; 894 else if (bits & SPCT_HAS_ARRAY) 895 ret = 2; 896 } 897 else 898 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0; 899 900 if (ret) 901 has_protected_decls = true; 902 903 return ret; 904} 905 906/* Two helper routines that check for phase 1 and phase 2. These are used 907 as callbacks for expand_stack_vars. */ 908 909static bool 910stack_protect_decl_phase_1 (tree decl) 911{ 912 return stack_protect_decl_phase (decl) == 1; 913} 914 915static bool 916stack_protect_decl_phase_2 (tree decl) 917{ 918 return stack_protect_decl_phase (decl) == 2; 919} 920 921/* Ensure that variables in different stack protection phases conflict 922 so that they are not merged and share the same stack slot. */ 923 924static void 925add_stack_protection_conflicts (void) 926{ 927 size_t i, j, n = stack_vars_num; 928 unsigned char *phase; 929 930 phase = XNEWVEC (unsigned char, n); 931 for (i = 0; i < n; ++i) 932 phase[i] = stack_protect_decl_phase (stack_vars[i].decl); 933 934 for (i = 0; i < n; ++i) 935 { 936 unsigned char ph_i = phase[i]; 937 for (j = 0; j < i; ++j) 938 if (ph_i != phase[j]) 939 add_stack_var_conflict (i, j); 940 } 941 942 XDELETEVEC (phase); 943} 944 945/* Create a decl for the guard at the top of the stack frame. */ 946 947static void 948create_stack_guard (void) 949{ 950 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node); 951 TREE_THIS_VOLATILE (guard) = 1; 952 TREE_USED (guard) = 1; 953 expand_one_stack_var (guard); 954 cfun->stack_protect_guard = guard; 955} 956 957/* Helper routine to check if a record or union contains an array field. */ 958 959static int 960record_or_union_type_has_array_p (tree tree_type) 961{ 962 tree fields = TYPE_FIELDS (tree_type); 963 tree f; 964 965 for (f = fields; f; f = TREE_CHAIN (f)) 966 if (TREE_CODE (f) == FIELD_DECL) 967 { 968 tree field_type = TREE_TYPE (f); 969 if ((TREE_CODE (field_type) == RECORD_TYPE 970 || TREE_CODE (field_type) == UNION_TYPE 971 || TREE_CODE (field_type) == QUAL_UNION_TYPE) 972 && record_or_union_type_has_array_p (field_type)) 973 return 1; 974 if (TREE_CODE (field_type) == ARRAY_TYPE) 975 return 1; 976 } 977 return 0; 978} 979 980/* Expand all variables used in the function. */ 981 982static void 983expand_used_vars (void) 984{ 985 tree t, outer_block = DECL_INITIAL (current_function_decl); 986 bool gen_stack_protect_signal = false; 987 988 /* Compute the phase of the stack frame for this function. */ 989 { 990 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; 991 int off = STARTING_FRAME_OFFSET % align; 992 frame_phase = off ? align - off : 0; 993 } 994 995 /* Set TREE_USED on all variables in the unexpanded_var_list. */ 996 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t)) 997 TREE_USED (TREE_VALUE (t)) = 1; 998 999 /* Clear TREE_USED on all variables associated with a block scope. */ 1000 clear_tree_used (outer_block); 1001 1002 /* Initialize local stack smashing state. */ 1003 has_protected_decls = false; 1004 has_short_buffer = false; 1005 1006 if (flag_stack_protect == SPCT_FLAG_STRONG) 1007 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t)) 1008 { 1009 tree var = TREE_VALUE (t); 1010 if (!is_global_var (var)) 1011 { 1012 tree var_type = TREE_TYPE (var); 1013 /* Examine local referenced variables that have their addresses 1014 * taken, contain an array, or are arrays. */ 1015 if (TREE_CODE (var) == VAR_DECL 1016 && (TREE_CODE (var_type) == ARRAY_TYPE 1017 || TREE_ADDRESSABLE (var) 1018 || ((TREE_CODE (var_type) == RECORD_TYPE 1019 || TREE_CODE (var_type) == UNION_TYPE 1020 || TREE_CODE (var_type) == QUAL_UNION_TYPE) 1021 && record_or_union_type_has_array_p (var_type)))) 1022 { 1023 gen_stack_protect_signal = true; 1024 break; 1025 } 1026 } 1027 } 1028 1029 /* At this point all variables on the unexpanded_var_list with TREE_USED 1030 set are not associated with any block scope. Lay them out. */ 1031 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t)) 1032 { 1033 tree var = TREE_VALUE (t); 1034 bool expand_now = false; 1035 1036 /* We didn't set a block for static or extern because it's hard 1037 to tell the difference between a global variable (re)declared 1038 in a local scope, and one that's really declared there to 1039 begin with. And it doesn't really matter much, since we're 1040 not giving them stack space. Expand them now. */ 1041 if (TREE_STATIC (var) || DECL_EXTERNAL (var)) 1042 expand_now = true; 1043 1044 /* Any variable that could have been hoisted into an SSA_NAME 1045 will have been propagated anywhere the optimizers chose, 1046 i.e. not confined to their original block. Allocate them 1047 as if they were defined in the outermost scope. */ 1048 else if (is_gimple_reg (var)) 1049 expand_now = true; 1050 1051 /* If the variable is not associated with any block, then it 1052 was created by the optimizers, and could be live anywhere 1053 in the function. */ 1054 else if (TREE_USED (var)) 1055 expand_now = true; 1056 1057 /* Finally, mark all variables on the list as used. We'll use 1058 this in a moment when we expand those associated with scopes. */ 1059 TREE_USED (var) = 1; 1060 1061 if (expand_now) 1062 expand_one_var (var, true); 1063 } 1064 cfun->unexpanded_var_list = NULL_TREE; 1065 1066 /* At this point, all variables within the block tree with TREE_USED 1067 set are actually used by the optimized function. Lay them out. */ 1068 expand_used_vars_for_block (outer_block, true); 1069 1070 if (stack_vars_num > 0) 1071 { 1072 /* Due to the way alias sets work, no variables with non-conflicting 1073 alias sets may be assigned the same address. Add conflicts to 1074 reflect this. */ 1075 add_alias_set_conflicts (); 1076 1077 /* If stack protection is enabled, we don't share space between 1078 vulnerable data and non-vulnerable data. */ 1079 if (flag_stack_protect) 1080 add_stack_protection_conflicts (); 1081 1082 /* Now that we have collected all stack variables, and have computed a 1083 minimal interference graph, attempt to save some stack space. */ 1084 partition_stack_vars (); 1085 if (dump_file) 1086 dump_stack_var_partition (); 1087 } 1088 1089 switch (flag_stack_protect) 1090 { 1091 case SPCT_FLAG_ALL: 1092 create_stack_guard (); 1093 break; 1094 1095 case SPCT_FLAG_STRONG: 1096 if (gen_stack_protect_signal 1097 || current_function_calls_alloca || has_protected_decls) 1098 create_stack_guard (); 1099 break; 1100 1101 case SPCT_FLAG_DEFAULT: 1102 if (current_function_calls_alloca || has_protected_decls) 1103 create_stack_guard(); 1104 break; 1105 1106 default: 1107 ; 1108 } 1109 1110 /* Assign rtl to each variable based on these partitions. */ 1111 if (stack_vars_num > 0) 1112 { 1113 /* Reorder decls to be protected by iterating over the variables 1114 array multiple times, and allocating out of each phase in turn. */ 1115 /* ??? We could probably integrate this into the qsort we did 1116 earlier, such that we naturally see these variables first, 1117 and thus naturally allocate things in the right order. */ 1118 if (has_protected_decls) 1119 { 1120 /* Phase 1 contains only character arrays. */ 1121 expand_stack_vars (stack_protect_decl_phase_1); 1122 1123 /* Phase 2 contains other kinds of arrays. */ 1124 if (flag_stack_protect == 2) 1125 expand_stack_vars (stack_protect_decl_phase_2); 1126 } 1127 1128 expand_stack_vars (NULL); 1129 1130 /* Free up stack variable graph data. */ 1131 XDELETEVEC (stack_vars); 1132 XDELETEVEC (stack_vars_sorted); 1133 XDELETEVEC (stack_vars_conflict); 1134 stack_vars = NULL; 1135 stack_vars_alloc = stack_vars_num = 0; 1136 stack_vars_conflict = NULL; 1137 stack_vars_conflict_alloc = 0; 1138 } 1139 1140 /* If the target requires that FRAME_OFFSET be aligned, do it. */ 1141 if (STACK_ALIGNMENT_NEEDED) 1142 { 1143 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; 1144 if (!FRAME_GROWS_DOWNWARD) 1145 frame_offset += align - 1; 1146 frame_offset &= -align; 1147 } 1148} 1149 1150 1151/* If we need to produce a detailed dump, print the tree representation 1152 for STMT to the dump file. SINCE is the last RTX after which the RTL 1153 generated for STMT should have been appended. */ 1154 1155static void 1156maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since) 1157{ 1158 if (dump_file && (dump_flags & TDF_DETAILS)) 1159 { 1160 fprintf (dump_file, "\n;; "); 1161 print_generic_expr (dump_file, stmt, TDF_SLIM); 1162 fprintf (dump_file, "\n"); 1163 1164 print_rtl (dump_file, since ? NEXT_INSN (since) : since); 1165 } 1166} 1167 1168/* A subroutine of expand_gimple_basic_block. Expand one COND_EXPR. 1169 Returns a new basic block if we've terminated the current basic 1170 block and created a new one. */ 1171 1172static basic_block 1173expand_gimple_cond_expr (basic_block bb, tree stmt) 1174{ 1175 basic_block new_bb, dest; 1176 edge new_edge; 1177 edge true_edge; 1178 edge false_edge; 1179 tree pred = COND_EXPR_COND (stmt); 1180 tree then_exp = COND_EXPR_THEN (stmt); 1181 tree else_exp = COND_EXPR_ELSE (stmt); 1182 rtx last2, last; 1183 1184 last2 = last = get_last_insn (); 1185 1186 extract_true_false_edges_from_block (bb, &true_edge, &false_edge); 1187 if (EXPR_LOCUS (stmt)) 1188 { 1189 emit_line_note (*(EXPR_LOCUS (stmt))); 1190 record_block_change (TREE_BLOCK (stmt)); 1191 } 1192 1193 /* These flags have no purpose in RTL land. */ 1194 true_edge->flags &= ~EDGE_TRUE_VALUE; 1195 false_edge->flags &= ~EDGE_FALSE_VALUE; 1196 1197 /* We can either have a pure conditional jump with one fallthru edge or 1198 two-way jump that needs to be decomposed into two basic blocks. */ 1199 if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp)) 1200 { 1201 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp))); 1202 add_reg_br_prob_note (last, true_edge->probability); 1203 maybe_dump_rtl_for_tree_stmt (stmt, last); 1204 if (EXPR_LOCUS (then_exp)) 1205 emit_line_note (*(EXPR_LOCUS (then_exp))); 1206 return NULL; 1207 } 1208 if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp)) 1209 { 1210 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp))); 1211 add_reg_br_prob_note (last, false_edge->probability); 1212 maybe_dump_rtl_for_tree_stmt (stmt, last); 1213 if (EXPR_LOCUS (else_exp)) 1214 emit_line_note (*(EXPR_LOCUS (else_exp))); 1215 return NULL; 1216 } 1217 gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR 1218 && TREE_CODE (else_exp) == GOTO_EXPR); 1219 1220 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp))); 1221 add_reg_br_prob_note (last, true_edge->probability); 1222 last = get_last_insn (); 1223 expand_expr (else_exp, const0_rtx, VOIDmode, 0); 1224 1225 BB_END (bb) = last; 1226 if (BARRIER_P (BB_END (bb))) 1227 BB_END (bb) = PREV_INSN (BB_END (bb)); 1228 update_bb_for_insn (bb); 1229 1230 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); 1231 dest = false_edge->dest; 1232 redirect_edge_succ (false_edge, new_bb); 1233 false_edge->flags |= EDGE_FALLTHRU; 1234 new_bb->count = false_edge->count; 1235 new_bb->frequency = EDGE_FREQUENCY (false_edge); 1236 new_edge = make_edge (new_bb, dest, 0); 1237 new_edge->probability = REG_BR_PROB_BASE; 1238 new_edge->count = new_bb->count; 1239 if (BARRIER_P (BB_END (new_bb))) 1240 BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); 1241 update_bb_for_insn (new_bb); 1242 1243 maybe_dump_rtl_for_tree_stmt (stmt, last2); 1244 1245 if (EXPR_LOCUS (else_exp)) 1246 emit_line_note (*(EXPR_LOCUS (else_exp))); 1247 1248 return new_bb; 1249} 1250 1251/* A subroutine of expand_gimple_basic_block. Expand one CALL_EXPR 1252 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually 1253 generated a tail call (something that might be denied by the ABI 1254 rules governing the call; see calls.c). 1255 1256 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and 1257 can still reach the rest of BB. The case here is __builtin_sqrt, 1258 where the NaN result goes through the external function (with a 1259 tailcall) and the normal result happens via a sqrt instruction. */ 1260 1261static basic_block 1262expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru) 1263{ 1264 rtx last2, last; 1265 edge e; 1266 edge_iterator ei; 1267 int probability; 1268 gcov_type count; 1269 1270 last2 = last = get_last_insn (); 1271 1272 expand_expr_stmt (stmt); 1273 1274 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) 1275 if (CALL_P (last) && SIBLING_CALL_P (last)) 1276 goto found; 1277 1278 maybe_dump_rtl_for_tree_stmt (stmt, last2); 1279 1280 *can_fallthru = true; 1281 return NULL; 1282 1283 found: 1284 /* ??? Wouldn't it be better to just reset any pending stack adjust? 1285 Any instructions emitted here are about to be deleted. */ 1286 do_pending_stack_adjust (); 1287 1288 /* Remove any non-eh, non-abnormal edges that don't go to exit. */ 1289 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be 1290 EH or abnormal edges, we shouldn't have created a tail call in 1291 the first place. So it seems to me we should just be removing 1292 all edges here, or redirecting the existing fallthru edge to 1293 the exit block. */ 1294 1295 probability = 0; 1296 count = 0; 1297 1298 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) 1299 { 1300 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) 1301 { 1302 if (e->dest != EXIT_BLOCK_PTR) 1303 { 1304 e->dest->count -= e->count; 1305 e->dest->frequency -= EDGE_FREQUENCY (e); 1306 if (e->dest->count < 0) 1307 e->dest->count = 0; 1308 if (e->dest->frequency < 0) 1309 e->dest->frequency = 0; 1310 } 1311 count += e->count; 1312 probability += e->probability; 1313 remove_edge (e); 1314 } 1315 else 1316 ei_next (&ei); 1317 } 1318 1319 /* This is somewhat ugly: the call_expr expander often emits instructions 1320 after the sibcall (to perform the function return). These confuse the 1321 find_many_sub_basic_blocks code, so we need to get rid of these. */ 1322 last = NEXT_INSN (last); 1323 gcc_assert (BARRIER_P (last)); 1324 1325 *can_fallthru = false; 1326 while (NEXT_INSN (last)) 1327 { 1328 /* For instance an sqrt builtin expander expands if with 1329 sibcall in the then and label for `else`. */ 1330 if (LABEL_P (NEXT_INSN (last))) 1331 { 1332 *can_fallthru = true; 1333 break; 1334 } 1335 delete_insn (NEXT_INSN (last)); 1336 } 1337 1338 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); 1339 e->probability += probability; 1340 e->count += count; 1341 BB_END (bb) = last; 1342 update_bb_for_insn (bb); 1343 1344 if (NEXT_INSN (last)) 1345 { 1346 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); 1347 1348 last = BB_END (bb); 1349 if (BARRIER_P (last)) 1350 BB_END (bb) = PREV_INSN (last); 1351 } 1352 1353 maybe_dump_rtl_for_tree_stmt (stmt, last2); 1354 1355 return bb; 1356} 1357 1358/* Expand basic block BB from GIMPLE trees to RTL. */ 1359 1360static basic_block 1361expand_gimple_basic_block (basic_block bb) 1362{ 1363 block_stmt_iterator bsi = bsi_start (bb); 1364 tree stmt = NULL; 1365 rtx note, last; 1366 edge e; 1367 edge_iterator ei; 1368 1369 if (dump_file) 1370 { 1371 fprintf (dump_file, 1372 "\n;; Generating RTL for tree basic block %d\n", 1373 bb->index); 1374 } 1375 1376 init_rtl_bb_info (bb); 1377 bb->flags |= BB_RTL; 1378 1379 if (!bsi_end_p (bsi)) 1380 stmt = bsi_stmt (bsi); 1381 1382 if (stmt && TREE_CODE (stmt) == LABEL_EXPR) 1383 { 1384 last = get_last_insn (); 1385 1386 expand_expr_stmt (stmt); 1387 1388 /* Java emits line number notes in the top of labels. 1389 ??? Make this go away once line number notes are obsoleted. */ 1390 BB_HEAD (bb) = NEXT_INSN (last); 1391 if (NOTE_P (BB_HEAD (bb))) 1392 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); 1393 bsi_next (&bsi); 1394 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); 1395 1396 maybe_dump_rtl_for_tree_stmt (stmt, last); 1397 } 1398 else 1399 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); 1400 1401 NOTE_BASIC_BLOCK (note) = bb; 1402 1403 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) 1404 { 1405 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ 1406 e->flags &= ~EDGE_EXECUTABLE; 1407 1408 /* At the moment not all abnormal edges match the RTL representation. 1409 It is safe to remove them here as find_many_sub_basic_blocks will 1410 rediscover them. In the future we should get this fixed properly. */ 1411 if (e->flags & EDGE_ABNORMAL) 1412 remove_edge (e); 1413 else 1414 ei_next (&ei); 1415 } 1416 1417 for (; !bsi_end_p (bsi); bsi_next (&bsi)) 1418 { 1419 tree stmt = bsi_stmt (bsi); 1420 basic_block new_bb; 1421 1422 if (!stmt) 1423 continue; 1424 1425 /* Expand this statement, then evaluate the resulting RTL and 1426 fixup the CFG accordingly. */ 1427 if (TREE_CODE (stmt) == COND_EXPR) 1428 { 1429 new_bb = expand_gimple_cond_expr (bb, stmt); 1430 if (new_bb) 1431 return new_bb; 1432 } 1433 else 1434 { 1435 tree call = get_call_expr_in (stmt); 1436 if (call && CALL_EXPR_TAILCALL (call)) 1437 { 1438 bool can_fallthru; 1439 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); 1440 if (new_bb) 1441 { 1442 if (can_fallthru) 1443 bb = new_bb; 1444 else 1445 return new_bb; 1446 } 1447 } 1448 else 1449 { 1450 last = get_last_insn (); 1451 expand_expr_stmt (stmt); 1452 maybe_dump_rtl_for_tree_stmt (stmt, last); 1453 } 1454 } 1455 } 1456 1457 do_pending_stack_adjust (); 1458 1459 /* Find the block tail. The last insn in the block is the insn 1460 before a barrier and/or table jump insn. */ 1461 last = get_last_insn (); 1462 if (BARRIER_P (last)) 1463 last = PREV_INSN (last); 1464 if (JUMP_TABLE_DATA_P (last)) 1465 last = PREV_INSN (PREV_INSN (last)); 1466 BB_END (bb) = last; 1467 1468 update_bb_for_insn (bb); 1469 1470 return bb; 1471} 1472 1473 1474/* Create a basic block for initialization code. */ 1475 1476static basic_block 1477construct_init_block (void) 1478{ 1479 basic_block init_block, first_block; 1480 edge e = NULL; 1481 int flags; 1482 1483 /* Multiple entry points not supported yet. */ 1484 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1); 1485 init_rtl_bb_info (ENTRY_BLOCK_PTR); 1486 init_rtl_bb_info (EXIT_BLOCK_PTR); 1487 ENTRY_BLOCK_PTR->flags |= BB_RTL; 1488 EXIT_BLOCK_PTR->flags |= BB_RTL; 1489 1490 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0); 1491 1492 /* When entry edge points to first basic block, we don't need jump, 1493 otherwise we have to jump into proper target. */ 1494 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb) 1495 { 1496 tree label = tree_block_label (e->dest); 1497 1498 emit_jump (label_rtx (label)); 1499 flags = 0; 1500 } 1501 else 1502 flags = EDGE_FALLTHRU; 1503 1504 init_block = create_basic_block (NEXT_INSN (get_insns ()), 1505 get_last_insn (), 1506 ENTRY_BLOCK_PTR); 1507 init_block->frequency = ENTRY_BLOCK_PTR->frequency; 1508 init_block->count = ENTRY_BLOCK_PTR->count; 1509 if (e) 1510 { 1511 first_block = e->dest; 1512 redirect_edge_succ (e, init_block); 1513 e = make_edge (init_block, first_block, flags); 1514 } 1515 else 1516 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); 1517 e->probability = REG_BR_PROB_BASE; 1518 e->count = ENTRY_BLOCK_PTR->count; 1519 1520 update_bb_for_insn (init_block); 1521 return init_block; 1522} 1523 1524 1525/* Create a block containing landing pads and similar stuff. */ 1526 1527static void 1528construct_exit_block (void) 1529{ 1530 rtx head = get_last_insn (); 1531 rtx end; 1532 basic_block exit_block; 1533 edge e, e2; 1534 unsigned ix; 1535 edge_iterator ei; 1536 1537 /* Make sure the locus is set to the end of the function, so that 1538 epilogue line numbers and warnings are set properly. */ 1539#ifdef USE_MAPPED_LOCATION 1540 if (cfun->function_end_locus != UNKNOWN_LOCATION) 1541#else 1542 if (cfun->function_end_locus.file) 1543#endif 1544 input_location = cfun->function_end_locus; 1545 1546 /* The following insns belong to the top scope. */ 1547 record_block_change (DECL_INITIAL (current_function_decl)); 1548 1549 /* Generate rtl for function exit. */ 1550 expand_function_end (); 1551 1552 end = get_last_insn (); 1553 if (head == end) 1554 return; 1555 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) 1556 head = NEXT_INSN (head); 1557 exit_block = create_basic_block (NEXT_INSN (head), end, 1558 EXIT_BLOCK_PTR->prev_bb); 1559 exit_block->frequency = EXIT_BLOCK_PTR->frequency; 1560 exit_block->count = EXIT_BLOCK_PTR->count; 1561 1562 ix = 0; 1563 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) 1564 { 1565 e = EDGE_PRED (EXIT_BLOCK_PTR, ix); 1566 if (!(e->flags & EDGE_ABNORMAL)) 1567 redirect_edge_succ (e, exit_block); 1568 else 1569 ix++; 1570 } 1571 1572 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); 1573 e->probability = REG_BR_PROB_BASE; 1574 e->count = EXIT_BLOCK_PTR->count; 1575 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) 1576 if (e2 != e) 1577 { 1578 e->count -= e2->count; 1579 exit_block->count -= e2->count; 1580 exit_block->frequency -= EDGE_FREQUENCY (e2); 1581 } 1582 if (e->count < 0) 1583 e->count = 0; 1584 if (exit_block->count < 0) 1585 exit_block->count = 0; 1586 if (exit_block->frequency < 0) 1587 exit_block->frequency = 0; 1588 update_bb_for_insn (exit_block); 1589} 1590 1591/* Helper function for discover_nonconstant_array_refs. 1592 Look for ARRAY_REF nodes with non-constant indexes and mark them 1593 addressable. */ 1594 1595static tree 1596discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, 1597 void *data ATTRIBUTE_UNUSED) 1598{ 1599 tree t = *tp; 1600 1601 if (IS_TYPE_OR_DECL_P (t)) 1602 *walk_subtrees = 0; 1603 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 1604 { 1605 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 1606 && is_gimple_min_invariant (TREE_OPERAND (t, 1)) 1607 && (!TREE_OPERAND (t, 2) 1608 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) 1609 || (TREE_CODE (t) == COMPONENT_REF 1610 && (!TREE_OPERAND (t,2) 1611 || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) 1612 || TREE_CODE (t) == BIT_FIELD_REF 1613 || TREE_CODE (t) == REALPART_EXPR 1614 || TREE_CODE (t) == IMAGPART_EXPR 1615 || TREE_CODE (t) == VIEW_CONVERT_EXPR 1616 || TREE_CODE (t) == NOP_EXPR 1617 || TREE_CODE (t) == CONVERT_EXPR) 1618 t = TREE_OPERAND (t, 0); 1619 1620 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) 1621 { 1622 t = get_base_address (t); 1623 if (t && DECL_P (t)) 1624 TREE_ADDRESSABLE (t) = 1; 1625 } 1626 1627 *walk_subtrees = 0; 1628 } 1629 1630 return NULL_TREE; 1631} 1632 1633/* RTL expansion is not able to compile array references with variable 1634 offsets for arrays stored in single register. Discover such 1635 expressions and mark variables as addressable to avoid this 1636 scenario. */ 1637 1638static void 1639discover_nonconstant_array_refs (void) 1640{ 1641 basic_block bb; 1642 block_stmt_iterator bsi; 1643 1644 FOR_EACH_BB (bb) 1645 { 1646 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi)) 1647 walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r, 1648 NULL , NULL); 1649 } 1650} 1651 1652/* Translate the intermediate representation contained in the CFG 1653 from GIMPLE trees to RTL. 1654 1655 We do conversion per basic block and preserve/update the tree CFG. 1656 This implies we have to do some magic as the CFG can simultaneously 1657 consist of basic blocks containing RTL and GIMPLE trees. This can 1658 confuse the CFG hooks, so be careful to not manipulate CFG during 1659 the expansion. */ 1660 1661static unsigned int 1662tree_expand_cfg (void) 1663{ 1664 basic_block bb, init_block; 1665 sbitmap blocks; 1666 edge_iterator ei; 1667 edge e; 1668 1669 /* Some backends want to know that we are expanding to RTL. */ 1670 currently_expanding_to_rtl = 1; 1671 1672 /* Prepare the rtl middle end to start recording block changes. */ 1673 reset_block_changes (); 1674 1675 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */ 1676 discover_nonconstant_array_refs (); 1677 1678 /* Expand the variables recorded during gimple lowering. */ 1679 expand_used_vars (); 1680 1681 /* Honor stack protection warnings. */ 1682 if (warn_stack_protect) 1683 { 1684 if (current_function_calls_alloca) 1685 warning (0, "not protecting local variables: variable length buffer"); 1686 if (has_short_buffer && !cfun->stack_protect_guard) 1687 warning (0, "not protecting function: no buffer at least %d bytes long", 1688 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); 1689 } 1690 1691 /* Set up parameters and prepare for return, for the function. */ 1692 expand_function_start (current_function_decl); 1693 1694 /* If this function is `main', emit a call to `__main' 1695 to run global initializers, etc. */ 1696 if (DECL_NAME (current_function_decl) 1697 && MAIN_NAME_P (DECL_NAME (current_function_decl)) 1698 && DECL_FILE_SCOPE_P (current_function_decl)) 1699 expand_main_function (); 1700 1701 /* Initialize the stack_protect_guard field. This must happen after the 1702 call to __main (if any) so that the external decl is initialized. */ 1703 if (cfun->stack_protect_guard) 1704 stack_protect_prologue (); 1705 1706 /* Register rtl specific functions for cfg. */ 1707 rtl_register_cfg_hooks (); 1708 1709 init_block = construct_init_block (); 1710 1711 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the 1712 remaining edges in expand_gimple_basic_block. */ 1713 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) 1714 e->flags &= ~EDGE_EXECUTABLE; 1715 1716 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) 1717 bb = expand_gimple_basic_block (bb); 1718 1719 construct_exit_block (); 1720 1721 /* We're done expanding trees to RTL. */ 1722 currently_expanding_to_rtl = 0; 1723 1724 /* Convert tree EH labels to RTL EH labels, and clean out any unreachable 1725 EH regions. */ 1726 convert_from_eh_region_ranges (); 1727 1728 rebuild_jump_labels (get_insns ()); 1729 find_exception_handler_labels (); 1730 1731 blocks = sbitmap_alloc (last_basic_block); 1732 sbitmap_ones (blocks); 1733 find_many_sub_basic_blocks (blocks); 1734 purge_all_dead_edges (); 1735 sbitmap_free (blocks); 1736 1737 compact_blocks (); 1738#ifdef ENABLE_CHECKING 1739 verify_flow_info(); 1740#endif 1741 1742 /* There's no need to defer outputting this function any more; we 1743 know we want to output it. */ 1744 DECL_DEFER_OUTPUT (current_function_decl) = 0; 1745 1746 /* Now that we're done expanding trees to RTL, we shouldn't have any 1747 more CONCATs anywhere. */ 1748 generating_concat_p = 0; 1749 1750 finalize_block_changes (); 1751 1752 if (dump_file) 1753 { 1754 fprintf (dump_file, 1755 "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); 1756 /* And the pass manager will dump RTL for us. */ 1757 } 1758 1759 /* If we're emitting a nested function, make sure its parent gets 1760 emitted as well. Doing otherwise confuses debug info. */ 1761 { 1762 tree parent; 1763 for (parent = DECL_CONTEXT (current_function_decl); 1764 parent != NULL_TREE; 1765 parent = get_containing_scope (parent)) 1766 if (TREE_CODE (parent) == FUNCTION_DECL) 1767 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; 1768 } 1769 1770 /* We are now committed to emitting code for this function. Do any 1771 preparation, such as emitting abstract debug info for the inline 1772 before it gets mangled by optimization. */ 1773 if (cgraph_function_possibly_inlined_p (current_function_decl)) 1774 (*debug_hooks->outlining_inline_function) (current_function_decl); 1775 1776 TREE_ASM_WRITTEN (current_function_decl) = 1; 1777 1778 /* After expanding, the return labels are no longer needed. */ 1779 return_label = NULL; 1780 naked_return_label = NULL; 1781 return 0; 1782} 1783 1784struct tree_opt_pass pass_expand = 1785{ 1786 "expand", /* name */ 1787 NULL, /* gate */ 1788 tree_expand_cfg, /* execute */ 1789 NULL, /* sub */ 1790 NULL, /* next */ 1791 0, /* static_pass_number */ 1792 TV_EXPAND, /* tv_id */ 1793 /* ??? If TER is enabled, we actually receive GENERIC. */ 1794 PROP_gimple_leh | PROP_cfg, /* properties_required */ 1795 PROP_rtl, /* properties_provided */ 1796 PROP_trees, /* properties_destroyed */ 1797 0, /* todo_flags_start */ 1798 TODO_dump_func, /* todo_flags_finish */ 1799 'r' /* letter */ 1800}; 1801