/* Language-independent node constructors for parse phase of GNU compiler.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains the low level primitives for operating on tree nodes,
   including allocation, list operations, interning of identifiers,
   construction of data type nodes and statement nodes,
   and construction of type conversion nodes.  It also contains
   tables indexed by tree code that describe how to take apart
   nodes of that code.

   It is intended to be language-independent, but occasionally
   calls language-dependent routines defined (for C) in typecheck.c.
*/ 29 30#include "config.h" 31#include "system.h" 32#include "coretypes.h" 33#include "tm.h" 34#include "flags.h" 35#include "hash-set.h" 36#include "machmode.h" 37#include "vec.h" 38#include "double-int.h" 39#include "input.h" 40#include "alias.h" 41#include "symtab.h" 42#include "wide-int.h" 43#include "inchash.h" 44#include "tree.h" 45#include "fold-const.h" 46#include "stor-layout.h" 47#include "calls.h" 48#include "attribs.h" 49#include "varasm.h" 50#include "tm_p.h" 51#include "hashtab.h" 52#include "hard-reg-set.h" 53#include "function.h" 54#include "obstack.h" 55#include "toplev.h" /* get_random_seed */ 56#include "filenames.h" 57#include "output.h" 58#include "target.h" 59#include "common/common-target.h" 60#include "langhooks.h" 61#include "tree-inline.h" 62#include "tree-iterator.h" 63#include "predict.h" 64#include "dominance.h" 65#include "cfg.h" 66#include "basic-block.h" 67#include "bitmap.h" 68#include "tree-ssa-alias.h" 69#include "internal-fn.h" 70#include "gimple-expr.h" 71#include "is-a.h" 72#include "gimple.h" 73#include "gimple-iterator.h" 74#include "gimplify.h" 75#include "gimple-ssa.h" 76#include "hash-map.h" 77#include "plugin-api.h" 78#include "ipa-ref.h" 79#include "cgraph.h" 80#include "tree-phinodes.h" 81#include "stringpool.h" 82#include "tree-ssanames.h" 83#include "rtl.h" 84#include "statistics.h" 85#include "real.h" 86#include "fixed-value.h" 87#include "insn-config.h" 88#include "expmed.h" 89#include "dojump.h" 90#include "explow.h" 91#include "emit-rtl.h" 92#include "stmt.h" 93#include "expr.h" 94#include "tree-dfa.h" 95#include "params.h" 96#include "tree-pass.h" 97#include "langhooks-def.h" 98#include "diagnostic.h" 99#include "tree-diagnostic.h" 100#include "tree-pretty-print.h" 101#include "except.h" 102#include "debug.h" 103#include "intl.h" 104#include "builtins.h" 105 106/* Tree code classes. 
*/

/* Table indexed by tree code giving the tree_code_class of each code.
   Generated by expanding all-tree.def with DEFTREECODE mapped to the
   TYPE (class) field.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) TYPE,
#define END_OF_BASE_TREE_CODES tcc_exceptional,

const enum tree_code_class tree_code_type[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Table indexed by tree code giving number of expression
   operands beyond the fixed part of the node structure.
   Not used for types or decls.  */

#define DEFTREECODE(SYM, NAME, TYPE, LENGTH) LENGTH,
#define END_OF_BASE_TREE_CODES 0,

const unsigned char tree_code_length[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Names of tree components.
   Used for printing out the tree and error messages.  */
#define DEFTREECODE(SYM, NAME, TYPE, LEN) NAME,
#define END_OF_BASE_TREE_CODES "@dummy",

static const char *const tree_code_name[] = {
#include "all-tree.def"
};

#undef DEFTREECODE
#undef END_OF_BASE_TREE_CODES

/* Each tree code class has an associated string representation.
   These must correspond to the tree_code_class entries.  */

const char *const tree_code_class_strings[] =
{
  "exceptional",
  "constant",
  "type",
  "declaration",
  "reference",
  "comparison",
  "unary",
  "binary",
  "statement",
  "vl_exp",
  "expression"
};

/* obstack.[ch] explicitly declined to prototype this.  */
extern int _obstack_allocated_p (struct obstack *h, void *obj);

/* Statistics-gathering stuff.  Only meaningful when GATHER_STATISTICS
   is enabled; see record_node_allocation_statistics.  */

static int tree_code_counts[MAX_TREE_CODES];
int tree_node_counts[(int) all_kinds];
int tree_node_sizes[(int) all_kinds];

/* Keep in sync with tree.h:enum tree_node_kind.  */
static const char * const tree_node_kind_names[] = {
  "decls",
  "types",
  "blocks",
  "stmts",
  "refs",
  "exprs",
  "constants",
  "identifiers",
  "vecs",
  "binfos",
  "ssa names",
  "constructors",
  "random kinds",
  "lang_decl kinds",
  "lang_type kinds",
  "omp clauses",
};

/* Unique id for next decl created.  */
static GTY(()) int next_decl_uid;
/* Unique id for next type created.  */
static GTY(()) int next_type_uid = 1;
/* Unique id for next debug decl created.  Use negative numbers,
   to catch erroneous uses.  */
static GTY(()) int next_debug_decl_uid;

/* Since we cannot rehash a type after it is in the table, we have to
   keep the hash code.  */

struct GTY((for_user)) type_hash {
  unsigned long hash;
  tree type;
};

/* Initial size of the hash table (rounded to next prime).  */
#define TYPE_HASH_INITIAL_SIZE 1000

/* Hasher for the GC-cached type table.  handle_cache_entry drops
   entries whose type was not marked by the collector.  */
struct type_cache_hasher : ggc_cache_hasher<type_hash *>
{
  static hashval_t hash (type_hash *t) { return t->hash; }
  static bool equal (type_hash *a, type_hash *b);

  static void
  handle_cache_entry (type_hash *&t)
  {
    extern void gt_ggc_mx (type_hash *&);
    if (t == HTAB_DELETED_ENTRY || t == HTAB_EMPTY_ENTRY)
      return;
    else if (ggc_marked_p (t->type))
      gt_ggc_mx (t);
    else
      t = static_cast<type_hash *> (HTAB_DELETED_ENTRY);
  }
};

/* Now here is the hash table.  When recording a type, it is added to
   the slot whose index is the hash code.  Note that the hash table is
   used for several kinds of types (function types, array types and
   array index range types, for now).  While all these live in the
   same table, they are completely independent, and the hash code is
   computed differently for each of these.  */

static GTY ((cache)) hash_table<type_cache_hasher> *type_hash_table;

/* Hash table and temporary node for larger integer const values.  */
static GTY (()) tree int_cst_node;

struct int_cst_hasher : ggc_cache_hasher<tree>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<int_cst_hasher> *int_cst_hash_table;

/* Hash table for optimization flags and target option flags.  Use the same
   hash table for both sets of options.  Nodes for building the current
   optimization and target option nodes.  The assumption is most of the time
   the options created will already be in the hash table, so we avoid
   allocating and freeing up a node repeatedly.  */
static GTY (()) tree cl_optimization_node;
static GTY (()) tree cl_target_option_node;

struct cl_option_hasher : ggc_cache_hasher<tree>
{
  static hashval_t hash (tree t);
  static bool equal (tree x, tree y);
};

static GTY ((cache)) hash_table<cl_option_hasher> *cl_option_hash_table;

/* General tree->tree mapping structure for use in hash tables.  */


static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *debug_expr_for_decl;

static GTY ((cache))
     hash_table<tree_decl_map_cache_hasher> *value_expr_for_decl;

/* Hasher for DECL -> vec<tree> maps, keyed by DECL_UID of the source
   decl; entries are dropped when the source decl is collected.  */
struct tree_vec_map_cache_hasher : ggc_cache_hasher<tree_vec_map *>
{
  static hashval_t hash (tree_vec_map *m) { return DECL_UID (m->base.from); }

  static bool
  equal (tree_vec_map *a, tree_vec_map *b)
  {
    return a->base.from == b->base.from;
  }

  static void
  handle_cache_entry (tree_vec_map *&m)
  {
    extern void gt_ggc_mx (tree_vec_map *&);
    if (m == HTAB_EMPTY_ENTRY || m == HTAB_DELETED_ENTRY)
      return;
    else if (ggc_marked_p (m->base.from))
      gt_ggc_mx (m);
    else
      m = static_cast<tree_vec_map *> (HTAB_DELETED_ENTRY);
  }
};

static GTY ((cache))
     hash_table<tree_vec_map_cache_hasher> *debug_args_for_decl;

static void set_type_quals (tree, int);
static void print_type_hash_statistics (void);
static void print_debug_expr_statistics (void);
static void print_value_expr_statistics (void);
static void type_hash_list (const_tree, inchash::hash &);
static void attribute_hash_list (const_tree, inchash::hash &);

tree global_trees[TI_MAX];
tree integer_types[itk_none];

bool int_n_enabled_p[NUM_INT_N_ENTS];
struct int_n_trees_t int_n_trees [NUM_INT_N_ENTS];

/* For each tree code, which tree structures it contains; filled in by
   initialize_tree_contains_struct.  */
unsigned char tree_contains_struct[MAX_TREE_CODES][64];

/* Number of operands for each OpenMP clause.  */
unsigned const char omp_clause_num_ops[] =
{
  0, /* OMP_CLAUSE_ERROR  */
  1, /* OMP_CLAUSE_PRIVATE  */
  1, /* OMP_CLAUSE_SHARED  */
  1, /* OMP_CLAUSE_FIRSTPRIVATE  */
  2, /* OMP_CLAUSE_LASTPRIVATE  */
  4, /* OMP_CLAUSE_REDUCTION  */
  1, /* OMP_CLAUSE_COPYIN  */
  1, /* OMP_CLAUSE_COPYPRIVATE  */
  3, /* OMP_CLAUSE_LINEAR  */
  2, /* OMP_CLAUSE_ALIGNED  */
  1, /* OMP_CLAUSE_DEPEND  */
  1, /* OMP_CLAUSE_UNIFORM  */
  2, /* OMP_CLAUSE_FROM  */
  2, /* OMP_CLAUSE_TO  */
  2, /* OMP_CLAUSE_MAP  */
  2, /* OMP_CLAUSE__CACHE_  */
  1, /* OMP_CLAUSE_DEVICE_RESIDENT  */
  1, /* OMP_CLAUSE_USE_DEVICE  */
  2, /* OMP_CLAUSE_GANG  */
  1, /* OMP_CLAUSE_ASYNC  */
  1, /* OMP_CLAUSE_WAIT  */
  0, /* OMP_CLAUSE_AUTO  */
  0, /* OMP_CLAUSE_SEQ  */
  1, /* OMP_CLAUSE__LOOPTEMP_  */
  1, /* OMP_CLAUSE_IF  */
  1, /* OMP_CLAUSE_NUM_THREADS  */
  1, /* OMP_CLAUSE_SCHEDULE  */
  0, /* OMP_CLAUSE_NOWAIT  */
  0, /* OMP_CLAUSE_ORDERED  */
  0, /* OMP_CLAUSE_DEFAULT  */
  3, /* OMP_CLAUSE_COLLAPSE  */
  0, /* OMP_CLAUSE_UNTIED  */
  1, /* OMP_CLAUSE_FINAL  */
  0, /* OMP_CLAUSE_MERGEABLE  */
  1, /* OMP_CLAUSE_DEVICE  */
  1, /* OMP_CLAUSE_DIST_SCHEDULE  */
  0, /* OMP_CLAUSE_INBRANCH  */
  0, /* OMP_CLAUSE_NOTINBRANCH  */
  1, /* OMP_CLAUSE_NUM_TEAMS  */
  1, /* OMP_CLAUSE_THREAD_LIMIT  */
  0, /* OMP_CLAUSE_PROC_BIND  */
  1, /* OMP_CLAUSE_SAFELEN  */
  1, /* OMP_CLAUSE_SIMDLEN  */
  0, /* OMP_CLAUSE_FOR  */
  0, /* OMP_CLAUSE_PARALLEL  */
  0, /* OMP_CLAUSE_SECTIONS  */
  0, /* OMP_CLAUSE_TASKGROUP  */
  1, /* OMP_CLAUSE__SIMDUID_  */
  1, /* OMP_CLAUSE__CILK_FOR_COUNT_  */
  0, /* OMP_CLAUSE_INDEPENDENT  */
  1, /* OMP_CLAUSE_WORKER  */
  1, /* OMP_CLAUSE_VECTOR  */
  1, /* OMP_CLAUSE_NUM_GANGS  */
  1, /* OMP_CLAUSE_NUM_WORKERS  */
  1, /* OMP_CLAUSE_VECTOR_LENGTH  */
};

/* Printable name of each OpenMP clause; must stay in sync with the
   omp_clause_num_ops table above.  */
const char * const omp_clause_code_name[] =
{
  "error_clause",
  "private",
  "shared",
  "firstprivate",
  "lastprivate",
  "reduction",
  "copyin",
  "copyprivate",
  "linear",
  "aligned",
  "depend",
  "uniform",
  "from",
  "to",
  "map",
  "_cache_",
  "device_resident",
  "use_device",
  "gang",
  "async",
  "wait",
  "auto",
  "seq",
  "_looptemp_",
  "if",
  "num_threads",
  "schedule",
  "nowait",
  "ordered",
  "default",
  "collapse",
  "untied",
  "final",
  "mergeable",
  "device",
  "dist_schedule",
  "inbranch",
  "notinbranch",
  "num_teams",
  "thread_limit",
  "proc_bind",
  "safelen",
  "simdlen",
  "for",
  "parallel",
  "sections",
  "taskgroup",
  "_simduid_",
  "_Cilk_for_count_",
  "independent",
  "worker",
  "vector",
  "num_gangs",
  "num_workers",
  "vector_length"
};


/* Return the tree node structure used by tree code CODE.
*/

static inline enum tree_node_structure_enum
tree_node_structure_for_code (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      {
        switch (code)
          {
          case FIELD_DECL:
            return TS_FIELD_DECL;
          case PARM_DECL:
            return TS_PARM_DECL;
          case VAR_DECL:
            return TS_VAR_DECL;
          case LABEL_DECL:
            return TS_LABEL_DECL;
          case RESULT_DECL:
            return TS_RESULT_DECL;
          case DEBUG_EXPR_DECL:
            return TS_DECL_WRTL;
          case CONST_DECL:
            return TS_CONST_DECL;
          case TYPE_DECL:
            return TS_TYPE_DECL;
          case FUNCTION_DECL:
            return TS_FUNCTION_DECL;
          case TRANSLATION_UNIT_DECL:
            return TS_TRANSLATION_UNIT_DECL;
          default:
            return TS_DECL_NON_COMMON;
          }
      }
    case tcc_type:
      return TS_TYPE_NON_COMMON;
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
    case tcc_statement:
    case tcc_vl_exp:
      return TS_EXP;
    default:  /* tcc_constant and tcc_exceptional */
      break;
    }
  switch (code)
    {
      /* tcc_constant cases.  */
    case VOID_CST:              return TS_TYPED;
    case INTEGER_CST:           return TS_INT_CST;
    case REAL_CST:              return TS_REAL_CST;
    case FIXED_CST:             return TS_FIXED_CST;
    case COMPLEX_CST:           return TS_COMPLEX;
    case VECTOR_CST:            return TS_VECTOR;
    case STRING_CST:            return TS_STRING;
      /* tcc_exceptional cases.  */
    case ERROR_MARK:            return TS_COMMON;
    case IDENTIFIER_NODE:       return TS_IDENTIFIER;
    case TREE_LIST:             return TS_LIST;
    case TREE_VEC:              return TS_VEC;
    case SSA_NAME:              return TS_SSA_NAME;
    case PLACEHOLDER_EXPR:      return TS_COMMON;
    case STATEMENT_LIST:        return TS_STATEMENT_LIST;
    case BLOCK:                 return TS_BLOCK;
    case CONSTRUCTOR:           return TS_CONSTRUCTOR;
    case TREE_BINFO:            return TS_BINFO;
    case OMP_CLAUSE:            return TS_OMP_CLAUSE;
    case OPTIMIZATION_NODE:     return TS_OPTIMIZATION;
    case TARGET_OPTION_NODE:    return TS_TARGET_OPTION;

    default:
      gcc_unreachable ();
    }
}


/* Initialize tree_contains_struct to describe the hierarchy of tree
   nodes.  */

static void
initialize_tree_contains_struct (void)
{
  unsigned i;

  for (i = ERROR_MARK; i < LAST_AND_UNUSED_TREE_CODE; i++)
    {
      enum tree_code code;
      enum tree_node_structure_enum ts_code;

      code = (enum tree_code) i;
      ts_code = tree_node_structure_for_code (code);

      /* Mark the TS structure itself.  */
      tree_contains_struct[code][ts_code] = 1;

      /* Mark all the structures that TS is derived from.  Each MARK_TS_*
	 macro marks the named structure and, transitively, everything
	 that structure is itself derived from.  */
      switch (ts_code)
        {
        case TS_TYPED:
        case TS_BLOCK:
          MARK_TS_BASE (code);
          break;

        case TS_COMMON:
        case TS_INT_CST:
        case TS_REAL_CST:
        case TS_FIXED_CST:
        case TS_VECTOR:
        case TS_STRING:
        case TS_COMPLEX:
        case TS_SSA_NAME:
        case TS_CONSTRUCTOR:
        case TS_EXP:
        case TS_STATEMENT_LIST:
          MARK_TS_TYPED (code);
          break;

        case TS_IDENTIFIER:
        case TS_DECL_MINIMAL:
        case TS_TYPE_COMMON:
        case TS_LIST:
        case TS_VEC:
        case TS_BINFO:
        case TS_OMP_CLAUSE:
        case TS_OPTIMIZATION:
        case TS_TARGET_OPTION:
          MARK_TS_COMMON (code);
          break;

        case TS_TYPE_WITH_LANG_SPECIFIC:
          MARK_TS_TYPE_COMMON (code);
          break;

        case TS_TYPE_NON_COMMON:
          MARK_TS_TYPE_WITH_LANG_SPECIFIC (code);
          break;

        case TS_DECL_COMMON:
          MARK_TS_DECL_MINIMAL (code);
          break;

        case TS_DECL_WRTL:
        case TS_CONST_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_DECL_NON_COMMON:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_DECL_WITH_VIS:
        case TS_PARM_DECL:
        case TS_LABEL_DECL:
        case TS_RESULT_DECL:
          MARK_TS_DECL_WRTL (code);
          break;

        case TS_FIELD_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        case TS_VAR_DECL:
          MARK_TS_DECL_WITH_VIS (code);
          break;

        case TS_TYPE_DECL:
        case TS_FUNCTION_DECL:
          MARK_TS_DECL_NON_COMMON (code);
          break;

        case TS_TRANSLATION_UNIT_DECL:
          MARK_TS_DECL_COMMON (code);
          break;

        default:
          gcc_unreachable ();
        }
    }

  /* Basic consistency checks for attributes used in fold.  */
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_NON_COMMON]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_WRTL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[TRANSLATION_UNIT_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_DECL_WITH_VIS]);
  gcc_assert (tree_contains_struct[VAR_DECL][TS_VAR_DECL]);
  gcc_assert (tree_contains_struct[FIELD_DECL][TS_FIELD_DECL]);
  gcc_assert (tree_contains_struct[PARM_DECL][TS_PARM_DECL]);
  gcc_assert (tree_contains_struct[LABEL_DECL][TS_LABEL_DECL]);
  gcc_assert (tree_contains_struct[RESULT_DECL][TS_RESULT_DECL]);
  gcc_assert (tree_contains_struct[CONST_DECL][TS_CONST_DECL]);
  gcc_assert (tree_contains_struct[TYPE_DECL][TS_TYPE_DECL]);
  gcc_assert (tree_contains_struct[FUNCTION_DECL][TS_FUNCTION_DECL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[IMPORTED_DECL][TS_DECL_COMMON]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_MINIMAL]);
  gcc_assert (tree_contains_struct[NAMELIST_DECL][TS_DECL_COMMON]);
}


/* Init tree.c.  */

void
init_ttree (void)
{
  /* Initialize the hash table of types.  */
  type_hash_table
    = hash_table<type_cache_hasher>::create_ggc (TYPE_HASH_INITIAL_SIZE);

  debug_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  value_expr_for_decl
    = hash_table<tree_decl_map_cache_hasher>::create_ggc (512);

  int_cst_hash_table = hash_table<int_cst_hasher>::create_ggc (1024);

  int_cst_node = make_int_cst (1, 1);

  cl_option_hash_table = hash_table<cl_option_hasher>::create_ggc (64);

  cl_optimization_node = make_node (OPTIMIZATION_NODE);
  cl_target_option_node = make_node (TARGET_OPTION_NODE);

  /* Initialize the tree_contains_struct array.  */
  initialize_tree_contains_struct ();
  lang_hooks.init_ts ();
}


/* The name of the object as the assembler will see it (but before any
   translations made by ASM_OUTPUT_LABELREF).  Often this is the same
   as DECL_NAME.  It is an IDENTIFIER_NODE.
*/ 693tree 694decl_assembler_name (tree decl) 695{ 696 if (!DECL_ASSEMBLER_NAME_SET_P (decl)) 697 lang_hooks.set_decl_assembler_name (decl); 698 return DECL_WITH_VIS_CHECK (decl)->decl_with_vis.assembler_name; 699} 700 701/* When the target supports COMDAT groups, this indicates which group the 702 DECL is associated with. This can be either an IDENTIFIER_NODE or a 703 decl, in which case its DECL_ASSEMBLER_NAME identifies the group. */ 704tree 705decl_comdat_group (const_tree node) 706{ 707 struct symtab_node *snode = symtab_node::get (node); 708 if (!snode) 709 return NULL; 710 return snode->get_comdat_group (); 711} 712 713/* Likewise, but make sure it's been reduced to an IDENTIFIER_NODE. */ 714tree 715decl_comdat_group_id (const_tree node) 716{ 717 struct symtab_node *snode = symtab_node::get (node); 718 if (!snode) 719 return NULL; 720 return snode->get_comdat_group_id (); 721} 722 723/* When the target supports named section, return its name as IDENTIFIER_NODE 724 or NULL if it is in no section. */ 725const char * 726decl_section_name (const_tree node) 727{ 728 struct symtab_node *snode = symtab_node::get (node); 729 if (!snode) 730 return NULL; 731 return snode->get_section (); 732} 733 734/* Set section section name of NODE to VALUE (that is expected to 735 be identifier node) */ 736void 737set_decl_section_name (tree node, const char *value) 738{ 739 struct symtab_node *snode; 740 741 if (value == NULL) 742 { 743 snode = symtab_node::get (node); 744 if (!snode) 745 return; 746 } 747 else if (TREE_CODE (node) == VAR_DECL) 748 snode = varpool_node::get_create (node); 749 else 750 snode = cgraph_node::get_create (node); 751 snode->set_section (value); 752} 753 754/* Return TLS model of a variable NODE. */ 755enum tls_model 756decl_tls_model (const_tree node) 757{ 758 struct varpool_node *snode = varpool_node::get (node); 759 if (!snode) 760 return TLS_MODEL_NONE; 761 return snode->tls_model; 762} 763 764/* Set TLS model of variable NODE to MODEL. 
*/
void
set_decl_tls_model (tree node, enum tls_model model)
{
  struct varpool_node *vnode;

  if (model == TLS_MODEL_NONE)
    {
      /* Clearing the model on a decl with no varpool entry is a no-op;
	 never create an entry just to record TLS_MODEL_NONE.  */
      vnode = varpool_node::get (node);
      if (!vnode)
	return;
    }
  else
    vnode = varpool_node::get_create (node);
  vnode->tls_model = model;
}

/* Compute the number of bytes occupied by a tree with code CODE.
   This function cannot be used for nodes that have variable sizes,
   including TREE_VEC, INTEGER_CST, STRING_CST, and CALL_EXPR.  */
size_t
tree_code_size (enum tree_code code)
{
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:  /* A decl node */
      {
        switch (code)
          {
          case FIELD_DECL:
            return sizeof (struct tree_field_decl);
          case PARM_DECL:
            return sizeof (struct tree_parm_decl);
          case VAR_DECL:
            return sizeof (struct tree_var_decl);
          case LABEL_DECL:
            return sizeof (struct tree_label_decl);
          case RESULT_DECL:
            return sizeof (struct tree_result_decl);
          case CONST_DECL:
            return sizeof (struct tree_const_decl);
          case TYPE_DECL:
            return sizeof (struct tree_type_decl);
          case FUNCTION_DECL:
            return sizeof (struct tree_function_decl);
          case DEBUG_EXPR_DECL:
            return sizeof (struct tree_decl_with_rtl);
          case TRANSLATION_UNIT_DECL:
            return sizeof (struct tree_translation_unit_decl);
          case NAMESPACE_DECL:
          case IMPORTED_DECL:
          case NAMELIST_DECL:
            return sizeof (struct tree_decl_non_common);
          default:
	    /* Unknown decl codes are sized by the front end.  */
            return lang_hooks.tree_size (code);
          }
      }

    case tcc_type:  /* a type node */
      return sizeof (struct tree_type_non_common);

    case tcc_reference:   /* a reference */
    case tcc_expression:  /* an expression */
    case tcc_statement:   /* an expression with side effects */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      /* tree_exp embeds one operand; add space for the rest.  */
      return (sizeof (struct tree_exp)
              + (TREE_CODE_LENGTH (code) - 1) * sizeof (tree));

    case tcc_constant:  /* a constant */
      switch (code)
        {
        case VOID_CST:          return sizeof (struct tree_typed);
        case INTEGER_CST:       gcc_unreachable ();
        case REAL_CST:          return sizeof (struct tree_real_cst);
        case FIXED_CST:         return sizeof (struct tree_fixed_cst);
        case COMPLEX_CST:       return sizeof (struct tree_complex);
        case VECTOR_CST:        return sizeof (struct tree_vector);
        case STRING_CST:        gcc_unreachable ();
        default:
          return lang_hooks.tree_size (code);
        }

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:   return lang_hooks.identifier_size;
        case TREE_LIST:         return sizeof (struct tree_list);

        case ERROR_MARK:
        case PLACEHOLDER_EXPR:  return sizeof (struct tree_common);

        case TREE_VEC:
        case OMP_CLAUSE:        gcc_unreachable ();

        case SSA_NAME:          return sizeof (struct tree_ssa_name);

        case STATEMENT_LIST:    return sizeof (struct tree_statement_list);
        case BLOCK:             return sizeof (struct tree_block);
        case CONSTRUCTOR:       return sizeof (struct tree_constructor);
        case OPTIMIZATION_NODE: return sizeof (struct tree_optimization_option);
        case TARGET_OPTION_NODE: return sizeof (struct tree_target_option);

        default:
          return lang_hooks.tree_size (code);
        }

    default:
      gcc_unreachable ();
    }
}

/* Compute the number of bytes occupied by NODE.  This routine only
   looks at TREE_CODE, except for those nodes that have variable sizes.
*/
size_t
tree_size (const_tree node)
{
  const enum tree_code code = TREE_CODE (node);
  switch (code)
    {
    case INTEGER_CST:
      /* One HOST_WIDE_INT element is embedded in tree_int_cst.  */
      return (sizeof (struct tree_int_cst)
              + (TREE_INT_CST_EXT_NUNITS (node) - 1) * sizeof (HOST_WIDE_INT));

    case TREE_BINFO:
      return (offsetof (struct tree_binfo, base_binfos)
              + vec<tree, va_gc>
                  ::embedded_size (BINFO_N_BASE_BINFOS (node)));

    case TREE_VEC:
      return (sizeof (struct tree_vec)
              + (TREE_VEC_LENGTH (node) - 1) * sizeof (tree));

    case VECTOR_CST:
      return (sizeof (struct tree_vector)
              + (TYPE_VECTOR_SUBPARTS (TREE_TYPE (node)) - 1) * sizeof (tree));

    case STRING_CST:
      /* +1 for the terminating NUL stored past TREE_STRING_LENGTH.  */
      return TREE_STRING_LENGTH (node) + offsetof (struct tree_string, str) + 1;

    case OMP_CLAUSE:
      return (sizeof (struct tree_omp_clause)
              + (omp_clause_num_ops[OMP_CLAUSE_CODE (node)] - 1)
                * sizeof (tree));

    default:
      if (TREE_CODE_CLASS (code) == tcc_vl_exp)
        return (sizeof (struct tree_exp)
                + (VL_EXP_OPERAND_LENGTH (node) - 1) * sizeof (tree));
      else
	/* All remaining codes have a fixed size per code.  */
        return tree_code_size (code);
    }
}

/* Record interesting allocation statistics for a tree node with CODE
   and LENGTH.
*/

static void
record_node_allocation_statistics (enum tree_code code ATTRIBUTE_UNUSED,
                                   size_t length ATTRIBUTE_UNUSED)
{
  enum tree_code_class type = TREE_CODE_CLASS (code);
  tree_node_kind kind;

  /* Statistics gathering is compile-time optional.  */
  if (!GATHER_STATISTICS)
    return;

  switch (type)
    {
    case tcc_declaration:  /* A decl node */
      kind = d_kind;
      break;

    case tcc_type:  /* a type node */
      kind = t_kind;
      break;

    case tcc_statement:  /* an expression with side effects */
      kind = s_kind;
      break;

    case tcc_reference:  /* a reference */
      kind = r_kind;
      break;

    case tcc_expression:  /* an expression */
    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
      kind = e_kind;
      break;

    case tcc_constant:  /* a constant */
      kind = c_kind;
      break;

    case tcc_exceptional:  /* something random, like an identifier.  */
      switch (code)
        {
        case IDENTIFIER_NODE:
          kind = id_kind;
          break;

        case TREE_VEC:
          kind = vec_kind;
          break;

        case TREE_BINFO:
          kind = binfo_kind;
          break;

        case SSA_NAME:
          kind = ssa_name_kind;
          break;

        case BLOCK:
          kind = b_kind;
          break;

        case CONSTRUCTOR:
          kind = constr_kind;
          break;

        case OMP_CLAUSE:
          kind = omp_clause_kind;
          break;

        default:
          kind = x_kind;
          break;
        }
      break;

    case tcc_vl_exp:
      kind = e_kind;
      break;

    default:
      gcc_unreachable ();
    }

  tree_code_counts[(int) code]++;
  tree_node_counts[(int) kind]++;
  tree_node_sizes[(int) kind] += length;
}

/* Allocate and return a new UID from the DECL_UID namespace.  */

int
allocate_decl_uid (void)
{
  return next_decl_uid++;
}

/* Return a newly allocated node of code CODE.  For decl and type
   nodes, some other fields are initialized.  The rest of the node is
   initialized to zero.  This function cannot be used for TREE_VEC,
   INTEGER_CST or OMP_CLAUSE nodes, which is enforced by asserts in
   tree_code_size.

   Achoo!  I got a code in the node.  */

tree
make_node_stat (enum tree_code code MEM_STAT_DECL)
{
  tree t;
  enum tree_code_class type = TREE_CODE_CLASS (code);
  size_t length = tree_code_size (code);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);
  TREE_SET_CODE (t, code);

  switch (type)
    {
    case tcc_statement:
      /* Statements always have side effects.  */
      TREE_SIDE_EFFECTS (t) = 1;
      break;

    case tcc_declaration:
      if (CODE_CONTAINS_STRUCT (code, TS_DECL_COMMON))
        {
          if (code == FUNCTION_DECL)
            {
              DECL_ALIGN (t) = FUNCTION_BOUNDARY;
              DECL_MODE (t) = FUNCTION_MODE;
            }
          else
            DECL_ALIGN (t) = 1;
        }
      DECL_SOURCE_LOCATION (t) = input_location;
      if (TREE_CODE (t) == DEBUG_EXPR_DECL)
	/* Debug decls draw from a separate, negative UID space so that
	   accidental uses as ordinary decls are easy to catch.  */
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          SET_DECL_PT_UID (t, -1);
        }
      if (TREE_CODE (t) == LABEL_DECL)
        LABEL_DECL_UID (t) = -1;

      break;

    case tcc_type:
      TYPE_UID (t) = next_type_uid++;
      TYPE_ALIGN (t) = BITS_PER_UNIT;
      TYPE_USER_ALIGN (t) = 0;
      TYPE_MAIN_VARIANT (t) = t;
      TYPE_CANONICAL (t) = t;

      /* Default to no attributes for type, but let target change that.  */
      TYPE_ATTRIBUTES (t) = NULL_TREE;
      targetm.set_default_type_attributes (t);

      /* We have not yet computed the alias set for this type.  */
      TYPE_ALIAS_SET (t) = -1;
      break;

    case tcc_constant:
      TREE_CONSTANT (t) = 1;
      break;

    case tcc_expression:
      switch (code)
        {
        case INIT_EXPR:
        case MODIFY_EXPR:
        case VA_ARG_EXPR:
        case PREDECREMENT_EXPR:
        case PREINCREMENT_EXPR:
        case POSTDECREMENT_EXPR:
        case POSTINCREMENT_EXPR:
          /* All of these have side-effects, no matter what their
             operands are.  */
          TREE_SIDE_EFFECTS (t) = 1;
          break;

        default:
          break;
        }
      break;

    default:
      /* Other classes need no special treatment.  */
      break;
    }

  return t;
}

/* Return a new node with the same contents as NODE except that its
   TREE_CHAIN, if it has one, is zero and it has a fresh uid.  */

tree
copy_node_stat (tree node MEM_STAT_DECL)
{
  tree t;
  enum tree_code code = TREE_CODE (node);
  size_t length;

  gcc_assert (code != STATEMENT_LIST);

  length = tree_size (node);
  record_node_allocation_statistics (code, length);
  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);
  memcpy (t, node, length);

  if (CODE_CONTAINS_STRUCT (code, TS_COMMON))
    TREE_CHAIN (t) = 0;
  TREE_ASM_WRITTEN (t) = 0;
  TREE_VISITED (t) = 0;

  if (TREE_CODE_CLASS (code) == tcc_declaration)
    {
      if (code == DEBUG_EXPR_DECL)
        DECL_UID (t) = --next_debug_decl_uid;
      else
        {
          DECL_UID (t) = allocate_decl_uid ();
          if (DECL_PT_UID_SET_P (node))
	    /* Keep pointing to the same points-to solution as NODE.  */
            SET_DECL_PT_UID (t, DECL_PT_UID (node));
        }
      if ((TREE_CODE (node) == PARM_DECL || TREE_CODE (node) == VAR_DECL)
          && DECL_HAS_VALUE_EXPR_P (node))
        {
          SET_DECL_VALUE_EXPR (t, DECL_VALUE_EXPR (node));
          DECL_HAS_VALUE_EXPR_P (t) = 1;
        }
      /* DECL_DEBUG_EXPR is copied explicitly by callers.  */
      if (TREE_CODE (node) == VAR_DECL)
        {
          DECL_HAS_DEBUG_EXPR_P (t) = 0;
          t->decl_with_vis.symtab_node = NULL;
        }
      if (TREE_CODE (node) == VAR_DECL && DECL_HAS_INIT_PRIORITY_P (node))
        {
          SET_DECL_INIT_PRIORITY (t, DECL_INIT_PRIORITY (node));
          DECL_HAS_INIT_PRIORITY_P (t) = 1;
        }
      if (TREE_CODE (node) == FUNCTION_DECL)
        {
          DECL_STRUCT_FUNCTION (t) = NULL;
          t->decl_with_vis.symtab_node = NULL;
        }
    }
  else if (TREE_CODE_CLASS (code) == tcc_type)
    {
      TYPE_UID (t) = next_type_uid++;
      /* The following is so that the debug code for
         the copy is different from the original type.
         The two statements usually duplicate each other
         (because they clear fields of the same union),
         but the optimizer should catch that.  */
      TYPE_SYMTAB_POINTER (t) = 0;
      TYPE_SYMTAB_ADDRESS (t) = 0;

      /* Do not copy the values cache.  */
      if (TYPE_CACHED_VALUES_P (t))
        {
          TYPE_CACHED_VALUES_P (t) = 0;
          TYPE_CACHED_VALUES (t) = NULL_TREE;
        }
    }

  return t;
}

/* Return a copy of a chain of nodes, chained through the TREE_CHAIN field.
   For example, this can copy a list made of TREE_LIST nodes.  */

tree
copy_list (tree list)
{
  tree head;
  tree prev, next;

  if (list == 0)
    return 0;

  head = prev = copy_node (list);
  next = TREE_CHAIN (list);
  while (next)
    {
      TREE_CHAIN (prev) = copy_node (next);
      prev = TREE_CHAIN (prev);
      next = TREE_CHAIN (next);
    }
  return head;
}


/* Return the value that TREE_INT_CST_EXT_NUNITS should have for an
   INTEGER_CST with value CST and type TYPE.  */

static unsigned int
get_int_cst_ext_nunits (tree type, const wide_int &cst)
{
  gcc_checking_assert (cst.get_precision () == TYPE_PRECISION (type));
  /* We need extra HWIs if CST is an unsigned integer with its
     upper bit set.  */
  if (TYPE_UNSIGNED (type) && wi::neg_p (cst))
    return cst.get_precision () / HOST_BITS_PER_WIDE_INT + 1;
  return cst.get_len ();
}

/* Return a new INTEGER_CST with value CST and type TYPE.  */

static tree
build_new_int_cst (tree type, const wide_int &cst)
{
  unsigned int len = cst.get_len ();
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);
  tree nt = make_int_cst (len, ext_len);

  if (len < ext_len)
    {
      /* Zero-extend: the extra elements are all-ones except for the
	 top one, which keeps only the bits within the precision.  */
      --ext_len;
      TREE_INT_CST_ELT (nt, ext_len)
        = zext_hwi (-1, cst.get_precision () % HOST_BITS_PER_WIDE_INT);
      for (unsigned int i = len; i < ext_len; ++i)
        TREE_INT_CST_ELT (nt, i) = -1;
    }
  else if (TYPE_UNSIGNED (type)
           && cst.get_precision () < len * HOST_BITS_PER_WIDE_INT)
    {
      len--;
      TREE_INT_CST_ELT (nt, len)
        = zext_hwi (cst.elt (len),
                    cst.get_precision () % HOST_BITS_PER_WIDE_INT);
    }

  for (unsigned int i = 0; i < len; i++)
    TREE_INT_CST_ELT (nt, i) = cst.elt (i);
  TREE_TYPE (nt) = type;
  return nt;
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst (tree type, HOST_WIDE_INT low)
{
  /* Support legacy code.  */
  if (!type)
    type = integer_type_node;

  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

/* Create an INT_CST node with a CST value zero extended to TYPE.  */

tree
build_int_cstu (tree type, unsigned HOST_WIDE_INT cst)
{
  return wide_int_to_tree (type, wi::uhwi (cst, TYPE_PRECISION (type)));
}

/* Create an INT_CST node with a LOW value sign extended to TYPE.  */

tree
build_int_cst_type (tree type, HOST_WIDE_INT low)
{
  gcc_assert (type);
  return wide_int_to_tree (type, wi::shwi (low, TYPE_PRECISION (type)));
}

/* Constructs tree in type TYPE with value given by CST.  Signedness
   of CST is assumed to be the same as the signedness of TYPE.
*/ 1292 1293tree 1294double_int_to_tree (tree type, double_int cst) 1295{ 1296 return wide_int_to_tree (type, widest_int::from (cst, TYPE_SIGN (type))); 1297} 1298 1299/* We force the wide_int CST to the range of the type TYPE by sign or 1300 zero extending it. OVERFLOWABLE indicates if we are interested in 1301 overflow of the value, when >0 we are only interested in signed 1302 overflow, for <0 we are interested in any overflow. OVERFLOWED 1303 indicates whether overflow has already occurred. CONST_OVERFLOWED 1304 indicates whether constant overflow has already occurred. We force 1305 T's value to be within range of T's type (by setting to 0 or 1 all 1306 the bits outside the type's range). We set TREE_OVERFLOWED if, 1307 OVERFLOWED is nonzero, 1308 or OVERFLOWABLE is >0 and signed overflow occurs 1309 or OVERFLOWABLE is <0 and any overflow occurs 1310 We return a new tree node for the extended wide_int. The node 1311 is shared if no overflow flags are set. */ 1312 1313 1314tree 1315force_fit_type (tree type, const wide_int_ref &cst, 1316 int overflowable, bool overflowed) 1317{ 1318 signop sign = TYPE_SIGN (type); 1319 1320 /* If we need to set overflow flags, return a new unshared node. */ 1321 if (overflowed || !wi::fits_to_tree_p (cst, type)) 1322 { 1323 if (overflowed 1324 || overflowable < 0 1325 || (overflowable > 0 && sign == SIGNED)) 1326 { 1327 wide_int tmp = wide_int::from (cst, TYPE_PRECISION (type), sign); 1328 tree t = build_new_int_cst (type, tmp); 1329 TREE_OVERFLOW (t) = 1; 1330 return t; 1331 } 1332 } 1333 1334 /* Else build a shared node. */ 1335 return wide_int_to_tree (type, cst); 1336} 1337 1338/* These are the hash table functions for the hash table of INTEGER_CST 1339 nodes of a sizetype. */ 1340 1341/* Return the hash code code X, an INTEGER_CST. 
 */

hashval_t
int_cst_hasher::hash (tree x)
{
  const_tree const t = x;
  hashval_t code = TYPE_UID (TREE_TYPE (t));
  int i;

  /* Mix the type's UID with every significant HWI element of the
     constant, so equal values of different types hash apart.  */
  for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
    code = iterative_hash_host_wide_int (TREE_INT_CST_ELT(t, i), code);

  return code;
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST.  */

bool
int_cst_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;

  if (TREE_TYPE (xt) != TREE_TYPE (yt)
      || TREE_INT_CST_NUNITS (xt) != TREE_INT_CST_NUNITS (yt)
      || TREE_INT_CST_EXT_NUNITS (xt) != TREE_INT_CST_EXT_NUNITS (yt))
    return false;

  for (int i = 0; i < TREE_INT_CST_NUNITS (xt); i++)
    if (TREE_INT_CST_ELT (xt, i) != TREE_INT_CST_ELT (yt, i))
      return false;

  return true;
}

/* Create an INT_CST node of TYPE and value CST.
   The returned node is always shared.  For small integers we use a
   per-type vector cache, for larger ones we use a single hash table.
   The value is extended from its precision according to the sign of
   the type to be a multiple of HOST_BITS_PER_WIDE_INT.  This defines
   the upper bits and ensures that hashing and value equality based
   upon the underlying HOST_WIDE_INTs works without masking.  */

tree
wide_int_to_tree (tree type, const wide_int_ref &pcst)
{
  tree t;
  int ix = -1;
  int limit = 0;

  gcc_assert (type);
  unsigned int prec = TYPE_PRECISION (type);
  signop sgn = TYPE_SIGN (type);

  /* Verify that everything is canonical.  */
  int l = pcst.get_len ();
  if (l > 1)
    {
      if (pcst.elt (l - 1) == 0)
	gcc_checking_assert (pcst.elt (l - 2) < 0);
      if (pcst.elt (l - 1) == (HOST_WIDE_INT) -1)
	gcc_checking_assert (pcst.elt (l - 2) >= 0);
    }

  wide_int cst = wide_int::from (pcst, prec, sgn);
  unsigned int ext_len = get_int_cst_ext_nunits (type, cst);

  if (ext_len == 1)
    {
      /* We just need to store a single HOST_WIDE_INT.  */
      HOST_WIDE_INT hwi;
      if (TYPE_UNSIGNED (type))
	hwi = cst.to_uhwi ();
      else
	hwi = cst.to_shwi ();

      /* Decide whether the value belongs in the small per-type cache:
	 IX becomes its slot and LIMIT the cache size; IX stays -1 when
	 the value goes to the hash table instead.  */
      switch (TREE_CODE (type))
	{
	case NULLPTR_TYPE:
	  gcc_assert (hwi == 0);
	  /* Fallthru.  */

	case POINTER_TYPE:
	case REFERENCE_TYPE:
	case POINTER_BOUNDS_TYPE:
	  /* Cache NULL pointer and zero bounds.  */
	  if (hwi == 0)
	    {
	      limit = 1;
	      ix = 0;
	    }
	  break;

	case BOOLEAN_TYPE:
	  /* Cache false or true.  */
	  limit = 2;
	  if (hwi < 2)
	    ix = hwi;
	  break;

	case INTEGER_TYPE:
	case OFFSET_TYPE:
	  if (TYPE_SIGN (type) == UNSIGNED)
	    {
	      /* Cache [0, N).  */
	      limit = INTEGER_SHARE_LIMIT;
	      if (IN_RANGE (hwi, 0, INTEGER_SHARE_LIMIT - 1))
		ix = hwi;
	    }
	  else
	    {
	      /* Cache [-1, N); slot 0 holds -1.  */
	      limit = INTEGER_SHARE_LIMIT + 1;
	      if (IN_RANGE (hwi, -1, INTEGER_SHARE_LIMIT - 1))
		ix = hwi + 1;
	    }
	  break;

	case ENUMERAL_TYPE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (ix >= 0)
	{
	  /* Look for it in the type's vector of small shared ints.  */
	  if (!TYPE_CACHED_VALUES_P (type))
	    {
	      TYPE_CACHED_VALUES_P (type) = 1;
	      TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	    }

	  t = TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix);
	  if (t)
	    /* Make sure no one is clobbering the shared constant.  */
	    gcc_checking_assert (TREE_TYPE (t) == type
				 && TREE_INT_CST_NUNITS (t) == 1
				 && TREE_INT_CST_OFFSET_NUNITS (t) == 1
				 && TREE_INT_CST_EXT_NUNITS (t) == 1
				 && TREE_INT_CST_ELT (t, 0) == hwi);
	  else
	    {
	      /* Create a new shared int.  */
	      t = build_new_int_cst (type, cst);
	      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
	    }
	}
      else
	{
	  /* Use the cache of larger shared ints, using int_cst_node as
	     a temporary.  */

	  TREE_INT_CST_ELT (int_cst_node, 0) = hwi;
	  TREE_TYPE (int_cst_node) = type;

	  tree *slot = int_cst_hash_table->find_slot (int_cst_node, INSERT);
	  t = *slot;
	  if (!t)
	    {
	      /* Insert this one into the hash table.  */
	      t = int_cst_node;
	      *slot = t;
	      /* Make a new node for next time round.  */
	      int_cst_node = make_int_cst (1, 1);
	    }
	}
    }
  else
    {
      /* The value either hashes properly or we drop it on the floor
	 for the gc to take care of.  There will not be enough of them
	 to worry about.  */

      tree nt = build_new_int_cst (type, cst);
      tree *slot = int_cst_hash_table->find_slot (nt, INSERT);
      t = *slot;
      if (!t)
	{
	  /* Insert this one into the hash table.  */
	  t = nt;
	  *slot = t;
	}
    }

  return t;
}

/* Insert INTEGER_CST T into the appropriate constant cache — the small
   per-type vector for cacheable values, otherwise the hash table of
   larger shared ints — asserting that no conflicting entry exists.  */

void
cache_integer_cst (tree t)
{
  tree type = TREE_TYPE (t);
  int ix = -1;
  int limit = 0;
  int prec = TYPE_PRECISION (type);

  gcc_assert (!TREE_OVERFLOW (t));

  switch (TREE_CODE (type))
    {
    case NULLPTR_TYPE:
      gcc_assert (integer_zerop (t));
      /* Fallthru.  */

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* Cache NULL pointer.  */
      if (integer_zerop (t))
	{
	  limit = 1;
	  ix = 0;
	}
      break;

    case BOOLEAN_TYPE:
      /* Cache false or true.  */
      limit = 2;
      if (wi::ltu_p (t, 2))
	ix = TREE_INT_CST_ELT (t, 0);
      break;

    case INTEGER_TYPE:
    case OFFSET_TYPE:
      if (TYPE_UNSIGNED (type))
	{
	  /* Cache 0..N */
	  limit = INTEGER_SHARE_LIMIT;

	  /* This is a little hokey, but if the prec is smaller than
	     what is necessary to hold INTEGER_SHARE_LIMIT, then the
	     obvious test will not get the correct answer.  */
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    {
	      if (tree_to_uhwi (t) < (unsigned HOST_WIDE_INT) INTEGER_SHARE_LIMIT)
		ix = tree_to_uhwi (t);
	    }
	  else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
	    ix = tree_to_uhwi (t);
	}
      else
	{
	  /* Cache -1..N */
	  limit = INTEGER_SHARE_LIMIT + 1;

	  if (integer_minus_onep (t))
	    ix = 0;
	  else if (!wi::neg_p (t))
	    {
	      if (prec < HOST_BITS_PER_WIDE_INT)
		{
		  if (tree_to_shwi (t) < INTEGER_SHARE_LIMIT)
		    ix = tree_to_shwi (t) + 1;
		}
	      else if (wi::ltu_p (t, INTEGER_SHARE_LIMIT))
		ix = tree_to_shwi (t) + 1;
	    }
	}
      break;

    case ENUMERAL_TYPE:
      break;

    default:
      gcc_unreachable ();
    }

  if (ix >= 0)
    {
      /* Look for it in the type's vector of small shared ints.  */
      if (!TYPE_CACHED_VALUES_P (type))
	{
	  TYPE_CACHED_VALUES_P (type) = 1;
	  TYPE_CACHED_VALUES (type) = make_tree_vec (limit);
	}

      gcc_assert (TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) == NULL_TREE);
      TREE_VEC_ELT (TYPE_CACHED_VALUES (type), ix) = t;
    }
  else
    {
      /* Use the cache of larger shared ints.  */
      tree *slot = int_cst_hash_table->find_slot (t, INSERT);
      /* If there is already an entry for the number verify it's the
	 same.  */
      if (*slot)
	gcc_assert (wi::eq_p (tree (*slot), t));
      else
	/* Otherwise insert this one into the hash table.  */
	*slot = t;
    }
}


/* Builds an integer constant in TYPE such that lowest BITS bits are ones
   and the rest are zeros.  */

tree
build_low_bits_mask (tree type, unsigned bits)
{
  gcc_assert (bits <= TYPE_PRECISION (type));

  return wide_int_to_tree (type, wi::mask (bits, false,
					   TYPE_PRECISION (type)));
}

/* Checks that X is integer constant that can be expressed in (unsigned)
   HOST_WIDE_INT without loss of precision.  */

bool
cst_and_fits_in_hwi (const_tree x)
{
  if (TREE_CODE (x) != INTEGER_CST)
    return false;

  if (TYPE_PRECISION (TREE_TYPE (x)) > HOST_BITS_PER_WIDE_INT)
    return false;

  return TREE_INT_CST_NUNITS (x) == 1;
}

/* Build a newly constructed VECTOR_CST node of length LEN.
   The TREE_TYPE is not initialized.  */

tree
make_vector_stat (unsigned len MEM_STAT_DECL)
{
  tree t;
  unsigned length = (len - 1) * sizeof (tree) + sizeof (struct tree_vector);

  record_node_allocation_statistics (VECTOR_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, VECTOR_CST);
  TREE_CONSTANT (t) = 1;

  return t;
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are given by the array VALS (one entry per vector subpart).  */

tree
build_vector_stat (tree type, tree *vals MEM_STAT_DECL)
{
  int over = 0;
  unsigned cnt = 0;
  tree v = make_vector (TYPE_VECTOR_SUBPARTS (type));
  TREE_TYPE (v) = type;

  /* Iterate through elements and check for overflow.  */
  for (cnt = 0; cnt < TYPE_VECTOR_SUBPARTS (type); ++cnt)
    {
      tree value = vals[cnt];

      VECTOR_CST_ELT (v, cnt) = value;

      /* Don't crash if we get an address constant.  */
      if (!CONSTANT_CLASS_P (value))
	continue;

      over |= TREE_OVERFLOW (value);
    }

  TREE_OVERFLOW (v) = over;
  return v;
}

/* Return a new VECTOR_CST node whose type is TYPE and whose values
   are extracted from V, a vector of CONSTRUCTOR_ELT.  */

tree
build_vector_from_ctor (tree type, vec<constructor_elt, va_gc> *v)
{
  tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
  unsigned HOST_WIDE_INT idx;
  tree value;

  FOR_EACH_CONSTRUCTOR_VALUE (v, idx, value)
    vec[idx] = value;
  /* Pad any subparts the constructor did not cover with zero.  */
  for (; idx < TYPE_VECTOR_SUBPARTS (type); ++idx)
    vec[idx] = build_zero_cst (TREE_TYPE (type));

  return build_vector (type, vec);
}

/* Build a vector of type VECTYPE where all the elements are SCs.  */
tree
build_vector_from_val (tree vectype, tree sc)
{
  int i, nunits = TYPE_VECTOR_SUBPARTS (vectype);

  if (sc == error_mark_node)
    return sc;

  /* Verify that the vector type is suitable for SC.  Note that there
     is some inconsistency in the type-system with respect to restrict
     qualifications of pointers.  Vector types always have a main-variant
     element type and the qualification is applied to the vector-type.
     So TREE_TYPE (vector-type) does not return a properly qualified
     vector element-type.  */
  gcc_checking_assert (types_compatible_p (TYPE_MAIN_VARIANT (TREE_TYPE (sc)),
					   TREE_TYPE (vectype)));

  if (CONSTANT_CLASS_P (sc))
    {
      /* A constant element gives a VECTOR_CST ...  */
      tree *v = XALLOCAVEC (tree, nunits);
      for (i = 0; i < nunits; ++i)
	v[i] = sc;
      return build_vector (vectype, v);
    }
  else
    {
      /* ... a non-constant one gives a CONSTRUCTOR.  */
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, nunits);
      for (i = 0; i < nunits; ++i)
	CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, sc);
      return build_constructor (vectype, v);
    }
}

/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in the vec pointed to by VALS.  */
tree
build_constructor (tree type, vec<constructor_elt, va_gc> *vals)
{
  tree c = make_node (CONSTRUCTOR);
  unsigned int i;
  constructor_elt *elt;
  bool constant_p = true;
  bool side_effects_p = false;

  TREE_TYPE (c) = type;
  CONSTRUCTOR_ELTS (c) = vals;

  FOR_EACH_VEC_SAFE_ELT (vals, i, elt)
    {
      /* Mostly ctors will have elts that don't have side-effects, so
	 the usual case is to scan all the elements.  Hence a single
	 loop for both const and side effects, rather than one loop
	 each (with early outs).  */
      if (!TREE_CONSTANT (elt->value))
	constant_p = false;
      if (TREE_SIDE_EFFECTS (elt->value))
	side_effects_p = true;
    }

  TREE_SIDE_EFFECTS (c) = side_effects_p;
  TREE_CONSTANT (c) = constant_p;

  return c;
}

/* Build a CONSTRUCTOR node made of a single initializer, with the specified
   INDEX and VALUE.  */
tree
build_constructor_single (tree type, tree index, tree value)
{
  vec<constructor_elt, va_gc> *v;
  constructor_elt elt = {index, value};

  vec_alloc (v, 1);
  v->quick_push (elt);

  return build_constructor (type, v);
}


/* Return a new CONSTRUCTOR node whose type is TYPE and whose values
   are in a list pointed to by VALS.
*/ 1812tree 1813build_constructor_from_list (tree type, tree vals) 1814{ 1815 tree t; 1816 vec<constructor_elt, va_gc> *v = NULL; 1817 1818 if (vals) 1819 { 1820 vec_alloc (v, list_length (vals)); 1821 for (t = vals; t; t = TREE_CHAIN (t)) 1822 CONSTRUCTOR_APPEND_ELT (v, TREE_PURPOSE (t), TREE_VALUE (t)); 1823 } 1824 1825 return build_constructor (type, v); 1826} 1827 1828/* Return a new CONSTRUCTOR node whose type is TYPE. NELTS is the number 1829 of elements, provided as index/value pairs. */ 1830 1831tree 1832build_constructor_va (tree type, int nelts, ...) 1833{ 1834 vec<constructor_elt, va_gc> *v = NULL; 1835 va_list p; 1836 1837 va_start (p, nelts); 1838 vec_alloc (v, nelts); 1839 while (nelts--) 1840 { 1841 tree index = va_arg (p, tree); 1842 tree value = va_arg (p, tree); 1843 CONSTRUCTOR_APPEND_ELT (v, index, value); 1844 } 1845 va_end (p); 1846 return build_constructor (type, v); 1847} 1848 1849/* Return a new FIXED_CST node whose type is TYPE and value is F. */ 1850 1851tree 1852build_fixed (tree type, FIXED_VALUE_TYPE f) 1853{ 1854 tree v; 1855 FIXED_VALUE_TYPE *fp; 1856 1857 v = make_node (FIXED_CST); 1858 fp = ggc_alloc<fixed_value> (); 1859 memcpy (fp, &f, sizeof (FIXED_VALUE_TYPE)); 1860 1861 TREE_TYPE (v) = type; 1862 TREE_FIXED_CST_PTR (v) = fp; 1863 return v; 1864} 1865 1866/* Return a new REAL_CST node whose type is TYPE and value is D. */ 1867 1868tree 1869build_real (tree type, REAL_VALUE_TYPE d) 1870{ 1871 tree v; 1872 REAL_VALUE_TYPE *dp; 1873 int overflow = 0; 1874 1875 /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE. 1876 Consider doing it via real_convert now. */ 1877 1878 v = make_node (REAL_CST); 1879 dp = ggc_alloc<real_value> (); 1880 memcpy (dp, &d, sizeof (REAL_VALUE_TYPE)); 1881 1882 TREE_TYPE (v) = type; 1883 TREE_REAL_CST_PTR (v) = dp; 1884 TREE_OVERFLOW (v) = overflow; 1885 return v; 1886} 1887 1888/* Return a new REAL_CST node whose type is TYPE 1889 and whose value is the integer value of the INTEGER_CST node I. 
 */

REAL_VALUE_TYPE
real_value_from_int_cst (const_tree type, const_tree i)
{
  REAL_VALUE_TYPE d;

  /* Clear all bits of the real value type so that we can later do
     bitwise comparisons to see if two values are the same.  */
  memset (&d, 0, sizeof d);

  /* A NULL type means "no target mode"; the conversion then uses
     VOIDmode.  The sign is taken from I's own type.  */
  real_from_integer (&d, type ? TYPE_MODE (type) : VOIDmode, i,
		     TYPE_SIGN (TREE_TYPE (i)));
  return d;
}

/* Given a tree representing an integer constant I, return a tree
   representing the same value as a floating-point constant of type TYPE.  */

tree
build_real_from_int_cst (tree type, const_tree i)
{
  tree v;
  int overflow = TREE_OVERFLOW (i);

  v = build_real (type, real_value_from_int_cst (type, i));

  /* Propagate any overflow flag from the integer constant.  */
  TREE_OVERFLOW (v) |= overflow;
  return v;
}

/* Return a newly constructed STRING_CST node whose value is
   the LEN characters at STR.
   Note that for a C string literal, LEN should include the trailing NUL.
   The TREE_TYPE is not initialized.  */

tree
build_string (int len, const char *str)
{
  tree s;
  size_t length;

  /* Do not waste bytes provided by padding of struct tree_string.  */
  length = len + offsetof (struct tree_string, str) + 1;

  record_node_allocation_statistics (STRING_CST, length);

  s = (tree) ggc_internal_alloc (length);

  /* Only the header is cleared; the string payload is written in
     full just below.  */
  memset (s, 0, sizeof (struct tree_typed));
  TREE_SET_CODE (s, STRING_CST);
  TREE_CONSTANT (s) = 1;
  TREE_STRING_LENGTH (s) = len;
  memcpy (s->string.str, str, len);
  /* Always NUL-terminate, even if STR itself was not.  */
  s->string.str[len] = '\0';

  return s;
}

/* Return a newly constructed COMPLEX_CST node whose value is
   specified by the real and imaginary parts REAL and IMAG.
   Both REAL and IMAG should be constant nodes.  TYPE, if specified,
   will be the type of the COMPLEX_CST; otherwise a new type will be made.
*/ 1952 1953tree 1954build_complex (tree type, tree real, tree imag) 1955{ 1956 tree t = make_node (COMPLEX_CST); 1957 1958 TREE_REALPART (t) = real; 1959 TREE_IMAGPART (t) = imag; 1960 TREE_TYPE (t) = type ? type : build_complex_type (TREE_TYPE (real)); 1961 TREE_OVERFLOW (t) = TREE_OVERFLOW (real) | TREE_OVERFLOW (imag); 1962 return t; 1963} 1964 1965/* Return a constant of arithmetic type TYPE which is the 1966 multiplicative identity of the set TYPE. */ 1967 1968tree 1969build_one_cst (tree type) 1970{ 1971 switch (TREE_CODE (type)) 1972 { 1973 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 1974 case POINTER_TYPE: case REFERENCE_TYPE: 1975 case OFFSET_TYPE: 1976 return build_int_cst (type, 1); 1977 1978 case REAL_TYPE: 1979 return build_real (type, dconst1); 1980 1981 case FIXED_POINT_TYPE: 1982 /* We can only generate 1 for accum types. */ 1983 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); 1984 return build_fixed (type, FCONST1 (TYPE_MODE (type))); 1985 1986 case VECTOR_TYPE: 1987 { 1988 tree scalar = build_one_cst (TREE_TYPE (type)); 1989 1990 return build_vector_from_val (type, scalar); 1991 } 1992 1993 case COMPLEX_TYPE: 1994 return build_complex (type, 1995 build_one_cst (TREE_TYPE (type)), 1996 build_zero_cst (TREE_TYPE (type))); 1997 1998 default: 1999 gcc_unreachable (); 2000 } 2001} 2002 2003/* Return an integer of type TYPE containing all 1's in as much precision as 2004 it contains, or a complex or vector whose subparts are such integers. */ 2005 2006tree 2007build_all_ones_cst (tree type) 2008{ 2009 if (TREE_CODE (type) == COMPLEX_TYPE) 2010 { 2011 tree scalar = build_all_ones_cst (TREE_TYPE (type)); 2012 return build_complex (type, scalar, scalar); 2013 } 2014 else 2015 return build_minus_one_cst (type); 2016} 2017 2018/* Return a constant of arithmetic type TYPE which is the 2019 opposite of the multiplicative identity of the set TYPE. 
*/ 2020 2021tree 2022build_minus_one_cst (tree type) 2023{ 2024 switch (TREE_CODE (type)) 2025 { 2026 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE: 2027 case POINTER_TYPE: case REFERENCE_TYPE: 2028 case OFFSET_TYPE: 2029 return build_int_cst (type, -1); 2030 2031 case REAL_TYPE: 2032 return build_real (type, dconstm1); 2033 2034 case FIXED_POINT_TYPE: 2035 /* We can only generate 1 for accum types. */ 2036 gcc_assert (ALL_SCALAR_ACCUM_MODE_P (TYPE_MODE (type))); 2037 return build_fixed (type, fixed_from_double_int (double_int_minus_one, 2038 TYPE_MODE (type))); 2039 2040 case VECTOR_TYPE: 2041 { 2042 tree scalar = build_minus_one_cst (TREE_TYPE (type)); 2043 2044 return build_vector_from_val (type, scalar); 2045 } 2046 2047 case COMPLEX_TYPE: 2048 return build_complex (type, 2049 build_minus_one_cst (TREE_TYPE (type)), 2050 build_zero_cst (TREE_TYPE (type))); 2051 2052 default: 2053 gcc_unreachable (); 2054 } 2055} 2056 2057/* Build 0 constant of type TYPE. This is used by constructor folding 2058 and thus the constant should be represented in memory by 2059 zero(es). 
 */

tree
build_zero_cst (tree type)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE: case NULLPTR_TYPE:
      return build_int_cst (type, 0);

    case REAL_TYPE:
      return build_real (type, dconst0);

    case FIXED_POINT_TYPE:
      return build_fixed (type, FCONST0 (TYPE_MODE (type)));

    case VECTOR_TYPE:
      {
	tree scalar = build_zero_cst (TREE_TYPE (type));

	return build_vector_from_val (type, scalar);
      }

    case COMPLEX_TYPE:
      {
	tree zero = build_zero_cst (TREE_TYPE (type));

	return build_complex (type, zero, zero);
      }

    default:
      /* Other scalar types get a converted zero; aggregates get an
	 empty CONSTRUCTOR, which likewise means "all zeros".  */
      if (!AGGREGATE_TYPE_P (type))
	return fold_convert (type, integer_zero_node);
      return build_constructor (type, NULL);
    }
}


/* Build a BINFO with BASE_BINFOS slots for base binfos.  */

tree
make_tree_binfo_stat (unsigned base_binfos MEM_STAT_DECL)
{
  tree t;
  size_t length = (offsetof (struct tree_binfo, base_binfos)
		   + vec<tree, va_gc>::embedded_size (base_binfos));

  record_node_allocation_statistics (TREE_BINFO, length);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the part before the embedded vector is cleared; the vector
     itself is initialized just below.  */
  memset (t, 0, offsetof (struct tree_binfo, base_binfos));

  TREE_SET_CODE (t, TREE_BINFO);

  BINFO_BASE_BINFOS (t)->embedded_init (base_binfos);

  return t;
}

/* Create a CASE_LABEL_EXPR tree node and return it.
 */

tree
build_case_label (tree low_value, tree high_value, tree label_decl)
{
  tree t = make_node (CASE_LABEL_EXPR);

  TREE_TYPE (t) = void_type_node;
  /* The case label inherits its location from the label decl.  */
  SET_EXPR_LOCATION (t, DECL_SOURCE_LOCATION (label_decl));

  CASE_LOW (t) = low_value;
  CASE_HIGH (t) = high_value;
  CASE_LABEL (t) = label_decl;
  CASE_CHAIN (t) = NULL_TREE;

  return t;
}

/* Build a newly constructed INTEGER_CST node.  LEN and EXT_LEN are the
   values of TREE_INT_CST_NUNITS and TREE_INT_CST_EXT_NUNITS respectively.
   The latter determines the length of the HOST_WIDE_INT vector.  */

tree
make_int_cst_stat (int len, int ext_len MEM_STAT_DECL)
{
  tree t;
  /* The node embeds EXT_LEN HOST_WIDE_INTs; one is already part of
     struct tree_int_cst.  */
  int length = ((ext_len - 1) * sizeof (HOST_WIDE_INT)
		+ sizeof (struct tree_int_cst));

  gcc_assert (len);
  record_node_allocation_statistics (INTEGER_CST, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, INTEGER_CST);
  TREE_INT_CST_NUNITS (t) = len;
  TREE_INT_CST_EXT_NUNITS (t) = ext_len;
  /* to_offset can only be applied to trees that are offset_int-sized
     or smaller.  EXT_LEN is correct if it fits, otherwise the constant
     must be exactly the precision of offset_int and so LEN is correct.  */
  if (ext_len <= OFFSET_INT_ELTS)
    TREE_INT_CST_OFFSET_NUNITS (t) = ext_len;
  else
    TREE_INT_CST_OFFSET_NUNITS (t) = len;

  TREE_CONSTANT (t) = 1;

  return t;
}

/* Build a newly constructed TREE_VEC node of length LEN.
 */

tree
make_tree_vec_stat (int len MEM_STAT_DECL)
{
  tree t;
  /* One tree slot is already part of struct tree_vec.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  record_node_allocation_statistics (TREE_VEC, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, TREE_VEC);
  TREE_VEC_LENGTH (t) = len;

  return t;
}

/* Grow a TREE_VEC node to new length LEN.  */

tree
grow_tree_vec_stat (tree v, int len MEM_STAT_DECL)
{
  gcc_assert (TREE_CODE (v) == TREE_VEC);

  int oldlen = TREE_VEC_LENGTH (v);
  /* Only growing is supported; shrinking is not.  */
  gcc_assert (len > oldlen);

  int oldlength = (oldlen - 1) * sizeof (tree) + sizeof (struct tree_vec);
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_vec);

  /* Only the growth is accounted; the original allocation was
     already recorded when the vector was created.  */
  record_node_allocation_statistics (TREE_VEC, length - oldlength);

  v = (tree) ggc_realloc (v, length PASS_MEM_STAT);

  TREE_VEC_LENGTH (v) = len;

  return v;
}

/* Return 1 if EXPR is the integer constant zero, or a complex or
   vector constant all of whose elements are zero.  */

int
integer_zerop (const_tree expr)
{
  STRIP_NOPS (expr);

  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return wi::eq_p (expr, 0);
    case COMPLEX_CST:
      return (integer_zerop (TREE_REALPART (expr))
	      && integer_zerop (TREE_IMAGPART (expr)));
    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	  if (!integer_zerop (VECTOR_CST_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}

/* Return 1 if EXPR is the integer constant one or the corresponding
   complex constant.
*/ 2241 2242int 2243integer_onep (const_tree expr) 2244{ 2245 STRIP_NOPS (expr); 2246 2247 switch (TREE_CODE (expr)) 2248 { 2249 case INTEGER_CST: 2250 return wi::eq_p (wi::to_widest (expr), 1); 2251 case COMPLEX_CST: 2252 return (integer_onep (TREE_REALPART (expr)) 2253 && integer_zerop (TREE_IMAGPART (expr))); 2254 case VECTOR_CST: 2255 { 2256 unsigned i; 2257 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2258 if (!integer_onep (VECTOR_CST_ELT (expr, i))) 2259 return false; 2260 return true; 2261 } 2262 default: 2263 return false; 2264 } 2265} 2266 2267/* Return 1 if EXPR is the integer constant one. For complex and vector, 2268 return 1 if every piece is the integer constant one. */ 2269 2270int 2271integer_each_onep (const_tree expr) 2272{ 2273 STRIP_NOPS (expr); 2274 2275 if (TREE_CODE (expr) == COMPLEX_CST) 2276 return (integer_onep (TREE_REALPART (expr)) 2277 && integer_onep (TREE_IMAGPART (expr))); 2278 else 2279 return integer_onep (expr); 2280} 2281 2282/* Return 1 if EXPR is an integer containing all 1's in as much precision as 2283 it contains, or a complex or vector whose subparts are such integers. */ 2284 2285int 2286integer_all_onesp (const_tree expr) 2287{ 2288 STRIP_NOPS (expr); 2289 2290 if (TREE_CODE (expr) == COMPLEX_CST 2291 && integer_all_onesp (TREE_REALPART (expr)) 2292 && integer_all_onesp (TREE_IMAGPART (expr))) 2293 return 1; 2294 2295 else if (TREE_CODE (expr) == VECTOR_CST) 2296 { 2297 unsigned i; 2298 for (i = 0; i < VECTOR_CST_NELTS (expr); ++i) 2299 if (!integer_all_onesp (VECTOR_CST_ELT (expr, i))) 2300 return 0; 2301 return 1; 2302 } 2303 2304 else if (TREE_CODE (expr) != INTEGER_CST) 2305 return 0; 2306 2307 return wi::max_value (TYPE_PRECISION (TREE_TYPE (expr)), UNSIGNED) == expr; 2308} 2309 2310/* Return 1 if EXPR is the integer constant minus one. 
*/ 2311 2312int 2313integer_minus_onep (const_tree expr) 2314{ 2315 STRIP_NOPS (expr); 2316 2317 if (TREE_CODE (expr) == COMPLEX_CST) 2318 return (integer_all_onesp (TREE_REALPART (expr)) 2319 && integer_zerop (TREE_IMAGPART (expr))); 2320 else 2321 return integer_all_onesp (expr); 2322} 2323 2324/* Return 1 if EXPR is an integer constant that is a power of 2 (i.e., has only 2325 one bit on). */ 2326 2327int 2328integer_pow2p (const_tree expr) 2329{ 2330 STRIP_NOPS (expr); 2331 2332 if (TREE_CODE (expr) == COMPLEX_CST 2333 && integer_pow2p (TREE_REALPART (expr)) 2334 && integer_zerop (TREE_IMAGPART (expr))) 2335 return 1; 2336 2337 if (TREE_CODE (expr) != INTEGER_CST) 2338 return 0; 2339 2340 return wi::popcount (expr) == 1; 2341} 2342 2343/* Return 1 if EXPR is an integer constant other than zero or a 2344 complex constant other than zero. */ 2345 2346int 2347integer_nonzerop (const_tree expr) 2348{ 2349 STRIP_NOPS (expr); 2350 2351 return ((TREE_CODE (expr) == INTEGER_CST 2352 && !wi::eq_p (expr, 0)) 2353 || (TREE_CODE (expr) == COMPLEX_CST 2354 && (integer_nonzerop (TREE_REALPART (expr)) 2355 || integer_nonzerop (TREE_IMAGPART (expr))))); 2356} 2357 2358/* Return 1 if EXPR is the integer constant one. For vector, 2359 return 1 if every piece is the integer constant minus one 2360 (representing the value TRUE). */ 2361 2362int 2363integer_truep (const_tree expr) 2364{ 2365 STRIP_NOPS (expr); 2366 2367 if (TREE_CODE (expr) == VECTOR_CST) 2368 return integer_all_onesp (expr); 2369 return integer_onep (expr); 2370} 2371 2372/* Return 1 if EXPR is the fixed-point constant zero. */ 2373 2374int 2375fixed_zerop (const_tree expr) 2376{ 2377 return (TREE_CODE (expr) == FIXED_CST 2378 && TREE_FIXED_CST (expr).data.is_zero ()); 2379} 2380 2381/* Return the power of two represented by a tree node known to be a 2382 power of two. 
 */

int
tree_log2 (const_tree expr)
{
  STRIP_NOPS (expr);

  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::exact_log2 (expr);
}

/* Similar, but return the largest integer Y such that 2 ** Y is less
   than or equal to EXPR.  */

int
tree_floor_log2 (const_tree expr)
{
  STRIP_NOPS (expr);

  if (TREE_CODE (expr) == COMPLEX_CST)
    return tree_log2 (TREE_REALPART (expr));

  return wi::floor_log2 (expr);
}

/* Return number of known trailing zero bits in EXPR, or, if the value of
   EXPR is known to be zero, the precision of its type.  */

unsigned int
tree_ctz (const_tree expr)
{
  if (!INTEGRAL_TYPE_P (TREE_TYPE (expr))
      && !POINTER_TYPE_P (TREE_TYPE (expr)))
    return 0;

  unsigned int ret1, ret2, prec = TYPE_PRECISION (TREE_TYPE (expr));
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      ret1 = wi::ctz (expr);
      return MIN (ret1, prec);
    case SSA_NAME:
      /* Use the nonzero-bits mask recorded on the SSA name, if any.  */
      ret1 = wi::ctz (get_nonzero_bits (expr));
      return MIN (ret1, prec);
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      /* For these codes the result has at least as many trailing
	 zeros as the operand with the fewest.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (ret1 == 0)
	return ret1;
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1, ret2);
    case POINTER_PLUS_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      /* Second operand is sizetype, which could be in theory
	 wider than pointer's precision.  Make sure we never
	 return more than prec.  */
      ret2 = MIN (ret2, prec);
      return MIN (ret1, ret2);
    case BIT_AND_EXPR:
      /* AND can only clear bits, so the result has at least as many
	 trailing zeros as either operand.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MAX (ret1, ret2);
    case MULT_EXPR:
      /* Trailing zeros of a product add up.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      ret2 = tree_ctz (TREE_OPERAND (expr, 1));
      return MIN (ret1 + ret2, prec);
    case LSHIFT_EXPR:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A left shift by a known in-range amount adds that many
	     trailing zeros.  */
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  return MIN (ret1 + ret2, prec);
	}
      return ret1;
    case RSHIFT_EXPR:
      if (tree_fits_uhwi_p (TREE_OPERAND (expr, 1))
	  && (tree_to_uhwi (TREE_OPERAND (expr, 1)) < prec))
	{
	  /* A right shift by a known in-range amount removes that many
	     trailing zeros.  */
	  ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	  ret2 = tree_to_uhwi (TREE_OPERAND (expr, 1));
	  if (ret1 > ret2)
	    return ret1 - ret2;
	}
      return 0;
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Division by a positive power of two acts like a right shift.  */
      if (TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST
	  && tree_int_cst_sgn (TREE_OPERAND (expr, 1)) == 1)
	{
	  int l = tree_log2 (TREE_OPERAND (expr, 1));
	  if (l >= 0)
	    {
	      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
	      ret2 = l;
	      if (ret1 > ret2)
		return ret1 - ret2;
	    }
	}
      return 0;
    CASE_CONVERT:
      ret1 = tree_ctz (TREE_OPERAND (expr, 0));
      /* If the inner value is known to be zero (all its bits are
	 trailing zeros), it stays zero in the new precision.  */
      if (ret1 && ret1 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (expr, 0))))
	ret1 = prec;
      return MIN (ret1, prec);
    case SAVE_EXPR:
      return tree_ctz (TREE_OPERAND (expr, 0));
    case COND_EXPR:
      /* The result is only as good as the worse of the two arms.  */
      ret1 = tree_ctz (TREE_OPERAND (expr, 1));
      if (ret1 == 0)
	return 0;
      ret2 = tree_ctz (TREE_OPERAND (expr, 2));
      return MIN (ret1, ret2);
    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is its second operand.  */
      return tree_ctz (TREE_OPERAND (expr, 1));
    case ADDR_EXPR:
      /* Derive trailing zeros from the known pointer alignment.  */
      ret1 = get_pointer_alignment (CONST_CAST_TREE (expr));
      if (ret1 > BITS_PER_UNIT)
	{
	  ret1 = ctz_hwi (ret1 / BITS_PER_UNIT);
	  return MIN (ret1, prec);
	}
      return 0;
    default:
      return 0;
    }
}

/* Return 1 if EXPR is the real constant zero.  Trailing zeroes matter for
   decimal float constants, so don't return 1 for them.  */

int
real_zerop (const_tree expr)
{
  STRIP_NOPS (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst0)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_zerop (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	  if (!real_zerop (VECTOR_CST_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}

/* Return 1 if EXPR is the real constant one in real or complex form.
   Trailing zeroes matter for decimal float constants, so don't return
   1 for them.  */

int
real_onep (const_tree expr)
{
  STRIP_NOPS (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconst1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	  if (!real_onep (VECTOR_CST_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}

/* Return 1 if EXPR is the real constant minus one.  Trailing zeroes
   matter for decimal float constants, so don't return 1 for them.
 */

int
real_minus_onep (const_tree expr)
{
  STRIP_NOPS (expr);

  switch (TREE_CODE (expr))
    {
    case REAL_CST:
      return REAL_VALUES_EQUAL (TREE_REAL_CST (expr), dconstm1)
	     && !(DECIMAL_FLOAT_MODE_P (TYPE_MODE (TREE_TYPE (expr))));
    case COMPLEX_CST:
      return real_minus_onep (TREE_REALPART (expr))
	     && real_zerop (TREE_IMAGPART (expr));
    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (expr); ++i)
	  if (!real_minus_onep (VECTOR_CST_ELT (expr, i)))
	    return false;
	return true;
      }
    default:
      return false;
    }
}

/* Nonzero if EXP is a constant or a cast of a constant.  */

int
really_constant_p (const_tree exp)
{
  /* This is not quite the same as STRIP_NOPS.  It does more.  */
  while (CONVERT_EXPR_P (exp)
	 || TREE_CODE (exp) == NON_LVALUE_EXPR)
    exp = TREE_OPERAND (exp, 0);
  return TREE_CONSTANT (exp);
}

/* Return first list element whose TREE_VALUE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
value_member (tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_VALUE (list))
	return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}

/* Return first list element whose TREE_PURPOSE is ELEM.
   Return 0 if ELEM is not in LIST.  */

tree
purpose_member (const_tree elem, tree list)
{
  while (list)
    {
      if (elem == TREE_PURPOSE (list))
	return list;
      list = TREE_CHAIN (list);
    }
  return NULL_TREE;
}

/* Return true if ELEM is in V.  */

bool
vec_member (const_tree elem, vec<tree, va_gc> *v)
{
  unsigned ix;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (v, ix, t)
    if (elem == t)
      return true;
  return false;
}

/* Returns element number IDX (zero-origin) of chain CHAIN, or
   NULL_TREE.
 */

tree
chain_index (int idx, tree chain)
{
  for (; chain && idx > 0; --idx)
    chain = TREE_CHAIN (chain);
  return chain;
}

/* Return nonzero if ELEM is part of the chain CHAIN.  */

int
chain_member (const_tree elem, const_tree chain)
{
  while (chain)
    {
      if (elem == chain)
	return 1;
      chain = DECL_CHAIN (chain);
    }

  return 0;
}

/* Return the length of a chain of nodes chained through TREE_CHAIN.
   We expect a null pointer to mark the end of the chain.
   This is the Lisp primitive `length'.  */

int
list_length (const_tree t)
{
  const_tree p = t;
#ifdef ENABLE_TREE_CHECKING
  /* Q advances at half the speed of P; if the two ever meet again the
     chain is circular (Floyd's cycle detection).  */
  const_tree q = t;
#endif
  int len = 0;

  while (p)
    {
      p = TREE_CHAIN (p);
#ifdef ENABLE_TREE_CHECKING
      if (len % 2)
	q = TREE_CHAIN (q);
      gcc_assert (p != q);
#endif
      len++;
    }

  return len;
}

/* Returns the first FIELD_DECL in the TYPE_FIELDS of the RECORD_TYPE or
   UNION_TYPE TYPE, or NULL_TREE if none.  */

tree
first_field (const_tree type)
{
  tree t = TYPE_FIELDS (type);
  while (t && TREE_CODE (t) != FIELD_DECL)
    t = TREE_CHAIN (t);
  return t;
}

/* Concatenate two chains of nodes (chained through TREE_CHAIN)
   by modifying the last node in chain 1 to point to chain 2.
   This is the Lisp primitive `nconc'.  */

tree
chainon (tree op1, tree op2)
{
  tree t1;

  if (!op1)
    return op2;
  if (!op2)
    return op1;

  for (t1 = op1; TREE_CHAIN (t1); t1 = TREE_CHAIN (t1))
    continue;
  TREE_CHAIN (t1) = op2;

#ifdef ENABLE_TREE_CHECKING
  {
    /* Verify the concatenation did not create a circular chain.  */
    tree t2;
    for (t2 = op2; t2; t2 = TREE_CHAIN (t2))
      gcc_assert (t2 != t1);
  }
#endif

  return op1;
}

/* Return the last node in a chain of nodes (chained through TREE_CHAIN).
 */

tree
tree_last (tree chain)
{
  tree next;
  if (chain)
    while ((next = TREE_CHAIN (chain)))
      chain = next;
  return chain;
}

/* Reverse the order of elements in the chain T,
   and return the new head of the chain (old last element).  */

tree
nreverse (tree t)
{
  tree prev = 0, decl, next;
  for (decl = t; decl; decl = next)
    {
      /* We shouldn't be using this function to reverse BLOCK chains; we
	 have blocks_nreverse for that.  */
      gcc_checking_assert (TREE_CODE (decl) != BLOCK);
      next = TREE_CHAIN (decl);
      TREE_CHAIN (decl) = prev;
      prev = decl;
    }
  return prev;
}

/* Return a newly created TREE_LIST node whose
   purpose and value fields are PARM and VALUE.  */

tree
build_tree_list_stat (tree parm, tree value MEM_STAT_DECL)
{
  tree t = make_node_stat (TREE_LIST PASS_MEM_STAT);
  TREE_PURPOSE (t) = parm;
  TREE_VALUE (t) = value;
  return t;
}

/* Build a chain of TREE_LIST nodes from a vector.  */

tree
build_tree_list_vec_stat (const vec<tree, va_gc> *vec MEM_STAT_DECL)
{
  tree ret = NULL_TREE;
  /* PP always points at the TREE_CHAIN slot to fill next, so the list
     is built in the same order as the vector.  */
  tree *pp = &ret;
  unsigned int i;
  tree t;
  FOR_EACH_VEC_SAFE_ELT (vec, i, t)
    {
      *pp = build_tree_list_stat (NULL, t PASS_MEM_STAT);
      pp = &TREE_CHAIN (*pp);
    }
  return ret;
}

/* Return a newly created TREE_LIST node whose
   purpose and value fields are PURPOSE and VALUE
   and whose TREE_CHAIN is CHAIN.
 */

tree
tree_cons_stat (tree purpose, tree value, tree chain MEM_STAT_DECL)
{
  tree node;

  node = ggc_alloc_tree_node_stat (sizeof (struct tree_list) PASS_MEM_STAT);
  /* Only the common header needs clearing; the list fields are all
     stored below.  */
  memset (node, 0, sizeof (struct tree_common));

  record_node_allocation_statistics (TREE_LIST, sizeof (struct tree_list));

  TREE_SET_CODE (node, TREE_LIST);
  TREE_CHAIN (node) = chain;
  TREE_PURPOSE (node) = purpose;
  TREE_VALUE (node) = value;
  return node;
}

/* Return the values of the elements of a CONSTRUCTOR as a vector of
   trees.  */

vec<tree, va_gc> *
ctor_to_vec (tree ctor)
{
  vec<tree, va_gc> *vec;
  vec_alloc (vec, CONSTRUCTOR_NELTS (ctor));
  unsigned int ix;
  tree val;

  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (ctor), ix, val)
    vec->quick_push (val);

  return vec;
}

/* Return the size nominally occupied by an object of type TYPE
   when it resides in memory.  The value is measured in units of bytes,
   and its data type is that normally used for type sizes
   (which is the first type created by make_signed_type or
   make_unsigned_type).  */

tree
size_in_bytes (const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return integer_zero_node;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t == 0)
    {
      /* Incomplete type: let the front end report it and fall back to
	 a zero size.  */
      lang_hooks.types.incomplete_type_error (NULL_TREE, type);
      return size_zero_node;
    }

  return t;
}

/* Return the size of TYPE (in bytes) as a wide integer
   or return -1 if the size can vary or is larger than an integer.
 */

HOST_WIDE_INT
int_size_in_bytes (const_tree type)
{
  tree t;

  if (type == error_mark_node)
    return 0;

  type = TYPE_MAIN_VARIANT (type);
  t = TYPE_SIZE_UNIT (type);

  if (t && tree_fits_uhwi_p (t))
    return TREE_INT_CST_LOW (t);
  else
    return -1;
}

/* Return the maximum size of TYPE (in bytes) as a wide integer
   or return -1 if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
max_int_size_in_bytes (const_tree type)
{
  HOST_WIDE_INT size = -1;
  tree size_tree;

  /* If this is an array type, check for a possible MAX_SIZE attached.  */

  if (TREE_CODE (type) == ARRAY_TYPE)
    {
      size_tree = TYPE_ARRAY_MAX_SIZE (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  /* If we still haven't been able to get a size, see if the language
     can compute a maximum size.  */

  if (size == -1)
    {
      size_tree = lang_hooks.types.max_size (type);

      if (size_tree && tree_fits_uhwi_p (size_tree))
	size = tree_to_uhwi (size_tree);
    }

  return size;
}

/* Return the bit position of FIELD, in bits from the start of the record.
   This is a tree of type bitsizetype.  */

tree
bit_position (const_tree field)
{
  return bit_from_pos (DECL_FIELD_OFFSET (field),
		       DECL_FIELD_BIT_OFFSET (field));
}

/* Return the byte position of FIELD, in bytes from the start of the record.
   This is a tree of type sizetype.  */

tree
byte_position (const_tree field)
{
  return byte_from_pos (DECL_FIELD_OFFSET (field),
			DECL_FIELD_BIT_OFFSET (field));
}

/* Likewise, but return as an integer.  It must be representable in
   that way (since it could be a signed value, we don't have the
   option of returning -1 like int_size_in_bytes can).
 */

HOST_WIDE_INT
int_byte_position (const_tree field)
{
  return tree_to_shwi (byte_position (field));
}

/* Return the strictest alignment, in bits, that T is known to have.  */

unsigned int
expr_align (const_tree t)
{
  unsigned int align0, align1;

  switch (TREE_CODE (t))
    {
    CASE_CONVERT:  case NON_LVALUE_EXPR:
      /* If we have conversions, we know that the alignment of the
	 object must meet each of the alignments of the types.  */
      align0 = expr_align (TREE_OPERAND (t, 0));
      align1 = TYPE_ALIGN (TREE_TYPE (t));
      return MAX (align0, align1);

    case SAVE_EXPR:         case COMPOUND_EXPR:       case MODIFY_EXPR:
    case INIT_EXPR:         case TARGET_EXPR:         case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
      /* These don't change the alignment of an object.  */
      return expr_align (TREE_OPERAND (t, 0));

    case COND_EXPR:
      /* The best we can do is say that the alignment is the least aligned
	 of the two arms.  */
      align0 = expr_align (TREE_OPERAND (t, 1));
      align1 = expr_align (TREE_OPERAND (t, 2));
      return MIN (align0, align1);

      /* FIXME: LABEL_DECL and CONST_DECL never have DECL_ALIGN set
	 meaningfully, it's always 1.  */
    case LABEL_DECL:     case CONST_DECL:
    case VAR_DECL:       case PARM_DECL:   case RESULT_DECL:
    case FUNCTION_DECL:
      gcc_assert (DECL_ALIGN (t) != 0);
      return DECL_ALIGN (t);

    default:
      break;
    }

  /* Otherwise take the alignment from that of the type.  */
  return TYPE_ALIGN (TREE_TYPE (t));
}

/* Return, as a tree node, the number of elements for TYPE (which is an
   ARRAY_TYPE) minus one.  This counts only elements of the top array.  */

tree
array_type_nelts (const_tree type)
{
  tree index_type, min, max;

  /* If they did it with unspecified bounds, then we should have already
     given an error about it before we got here.  */
  if (! TYPE_DOMAIN (type))
    return error_mark_node;

  index_type = TYPE_DOMAIN (type);
  min = TYPE_MIN_VALUE (index_type);
  max = TYPE_MAX_VALUE (index_type);

  /* TYPE_MAX_VALUE may not be set if the array has unknown length.  */
  if (!max)
    return error_mark_node;

  return (integer_zerop (min)
	  ? max
	  : fold_build2 (MINUS_EXPR, TREE_TYPE (max), max, min));
}

/* If arg is static -- a reference to an object in static storage -- then
   return the object.  This is not the same as the C meaning of `static'.
   If arg isn't static, return NULL.  */

tree
staticp (tree arg)
{
  switch (TREE_CODE (arg))
    {
    case FUNCTION_DECL:
      /* Nested functions are static, even though taking their address will
	 involve a trampoline as we unnest the nested function and create
	 the trampoline on the tree level.  */
      return arg;

    case VAR_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      && ! DECL_THREAD_LOCAL_P (arg)
	      && ! DECL_DLLIMPORT_P (arg)
	      ? arg : NULL);

    case CONST_DECL:
      return ((TREE_STATIC (arg) || DECL_EXTERNAL (arg))
	      ? arg : NULL);

    case CONSTRUCTOR:
      return TREE_STATIC (arg) ? arg : NULL;

    case LABEL_DECL:
    case STRING_CST:
      return arg;

    case COMPONENT_REF:
      /* If the thing being referenced is not a field, then it is
	 something language specific.  */
      gcc_assert (TREE_CODE (TREE_OPERAND (arg, 1)) == FIELD_DECL);

      /* If we are referencing a bitfield, we can't evaluate an
	 ADDR_EXPR at compile time and so it isn't a constant.  */
      if (DECL_BIT_FIELD (TREE_OPERAND (arg, 1)))
	return NULL;

      return staticp (TREE_OPERAND (arg, 0));

    case BIT_FIELD_REF:
      return NULL;

    case INDIRECT_REF:
      return TREE_CONSTANT (TREE_OPERAND (arg, 0)) ? arg : NULL;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Static only when the element size and the index are both
	 compile-time constants.  */
      if (TREE_CODE (TYPE_SIZE (TREE_TYPE (arg))) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg, 1)) == INTEGER_CST)
	return staticp (TREE_OPERAND (arg, 0));
      else
	return NULL;

    case COMPOUND_LITERAL_EXPR:
      return TREE_STATIC (COMPOUND_LITERAL_EXPR_DECL (arg)) ? arg : NULL;

    default:
      return NULL;
    }
}




/* Return whether OP is a DECL whose address is function-invariant.  */

bool
decl_address_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case PARM_DECL:
    case RESULT_DECL:
    case LABEL_DECL:
    case FUNCTION_DECL:
      return true;

    case VAR_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || DECL_THREAD_LOCAL_P (op)
	  || DECL_CONTEXT (op) == current_function_decl
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op))
	  || decl_function_context (op) == current_function_decl)
	return true;
      break;

    default:
      break;
    }

  return false;
}

/* Return whether OP is a DECL whose address is interprocedural-invariant.  */

bool
decl_address_ip_invariant_p (const_tree op)
{
  /* The conditions below are slightly less strict than the one in
     staticp.  */

  switch (TREE_CODE (op))
    {
    case LABEL_DECL:
    case FUNCTION_DECL:
    case STRING_CST:
      return true;

    case VAR_DECL:
      if (((TREE_STATIC (op) || DECL_EXTERNAL (op))
	   && !DECL_DLLIMPORT_P (op))
	  || DECL_THREAD_LOCAL_P (op))
	return true;
      break;

    case CONST_DECL:
      if ((TREE_STATIC (op) || DECL_EXTERNAL (op)))
	return true;
      break;

    default:
      break;
    }

  return false;
}


/* Return true if T is function-invariant (internal function, does
   not handle arithmetic; that's handled in skip_simple_arithmetic and
   tree_invariant_p).  */

static bool tree_invariant_p (tree t);

static bool
tree_invariant_p_1 (tree t)
{
  tree op;

  if (TREE_CONSTANT (t)
      || (TREE_READONLY (t) && !TREE_SIDE_EFFECTS (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SAVE_EXPR:
      return true;

    case ADDR_EXPR:
      /* The address is invariant if every component in the reference
	 chain down to the base is invariant and the base's address is
	 itself invariant.  */
      op = TREE_OPERAND (t, 0);
      while (handled_component_p (op))
	{
	  switch (TREE_CODE (op))
	    {
	    case ARRAY_REF:
	    case ARRAY_RANGE_REF:
	      if (!tree_invariant_p (TREE_OPERAND (op, 1))
		  || TREE_OPERAND (op, 2) != NULL_TREE
		  || TREE_OPERAND (op, 3) != NULL_TREE)
		return false;
	      break;

	    case COMPONENT_REF:
	      if (TREE_OPERAND (op, 2) != NULL_TREE)
		return false;
	      break;

	    default:;
	    }
	  op = TREE_OPERAND (op, 0);
	}

      return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);

    default:
      break;
    }

  return false;
}

/* Return true if T is function-invariant.  */

static bool
tree_invariant_p (tree t)
{
  tree inner = skip_simple_arithmetic (t);
  return tree_invariant_p_1 (inner);
}

/* Wrap a SAVE_EXPR around EXPR, if appropriate.
   Do this to any expression which may be used in more than one place,
   but must be evaluated only once.

   Normally, expand_expr would reevaluate the expression each time.
   Calling save_expr produces something that is evaluated and recorded
   the first time expand_expr is called on it.  Subsequent calls to
   expand_expr just reuse the recorded value.

   The call to expand_expr that generates code that actually computes
   the value is the first call *at compile time*.  Subsequent calls
   *at compile time* generate code to use the saved value.
   This produces correct result provided that *at run time* control
   always flows through the insns made by the first expand_expr
   before reaching the other places where the save_expr was evaluated.
   You, the caller of save_expr, must make sure this is so.

   Constants, and certain read-only nodes, are returned with no
   SAVE_EXPR because that is safe.  Expressions containing placeholders
   are not touched; see tree.def for an explanation of what these
   are used for.  */

tree
save_expr (tree expr)
{
  tree t = fold (expr);
  tree inner;

  /* If the tree evaluates to a constant, then we don't want to hide that
     fact (i.e. this allows further folding, and direct checks for constants).
     However, a read-only object that has side effects cannot be bypassed.
     Since it is no problem to reevaluate literals, we just return the
     literal node.  */
  inner = skip_simple_arithmetic (t);
  if (TREE_CODE (inner) == ERROR_MARK)
    return inner;

  if (tree_invariant_p_1 (inner))
    return t;

  /* If INNER contains a PLACEHOLDER_EXPR, we must evaluate it each time, since
     it means that the size or offset of some field of an object depends on
     the value within another field.

     Note that it must not be the case that T contains both a PLACEHOLDER_EXPR
     and some variable since it would then need to be both evaluated once and
     evaluated more than once.  Front-ends must assure this case cannot
     happen by surrounding any such subexpressions in their own SAVE_EXPR
     and forcing evaluation at the proper time.  */
  if (contains_placeholder_p (inner))
    return t;

  t = build1 (SAVE_EXPR, TREE_TYPE (expr), t);
  SET_EXPR_LOCATION (t, EXPR_LOCATION (expr));

  /* This expression might be placed ahead of a jump to ensure that the
     value was computed on both sides of the jump.  So make sure it isn't
     eliminated as dead.  */
  TREE_SIDE_EFFECTS (t) = 1;
  return t;
}

/* Look inside EXPR into any simple arithmetic operations.  Return the
   outermost non-arithmetic or non-invariant node.  */

tree
skip_simple_arithmetic (tree expr)
{
  /* We don't care about whether this can be used as an lvalue in this
     context.  */
  while (TREE_CODE (expr) == NON_LVALUE_EXPR)
    expr = TREE_OPERAND (expr, 0);

  /* If we have simple operations applied to a SAVE_EXPR or to a SAVE_EXPR and
     a constant, it will be more efficient to not make another SAVE_EXPR since
     it will allow better simplification and GCSE will be able to merge the
     computations if they actually occur.  */
  while (true)
    {
      if (UNARY_CLASS_P (expr))
	expr = TREE_OPERAND (expr, 0);
      else if (BINARY_CLASS_P (expr))
	{
	  /* Descend into whichever operand is not invariant; stop when
	     neither operand is.  */
	  if (tree_invariant_p (TREE_OPERAND (expr, 1)))
	    expr = TREE_OPERAND (expr, 0);
	  else if (tree_invariant_p (TREE_OPERAND (expr, 0)))
	    expr = TREE_OPERAND (expr, 1);
	  else
	    break;
	}
      else
	break;
    }

  return expr;
}

/* Look inside EXPR into simple arithmetic operations involving constants.
   Return the outermost non-arithmetic or non-constant node.
 */

tree
skip_simple_constant_arithmetic (tree expr)
{
  while (TREE_CODE (expr) == NON_LVALUE_EXPR)
    expr = TREE_OPERAND (expr, 0);

  while (true)
    {
      if (UNARY_CLASS_P (expr))
	expr = TREE_OPERAND (expr, 0);
      else if (BINARY_CLASS_P (expr))
	{
	  /* Like skip_simple_arithmetic, but only skip past operands
	     that are TREE_CONSTANT.  */
	  if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
	    expr = TREE_OPERAND (expr, 0);
	  else if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
	    expr = TREE_OPERAND (expr, 1);
	  else
	    break;
	}
      else
	break;
    }

  return expr;
}

/* Return which tree structure is used by T.  */

enum tree_node_structure_enum
tree_node_structure (const_tree t)
{
  const enum tree_code code = TREE_CODE (t);
  return tree_node_structure_for_code (code);
}

/* Set various status flags when building a CALL_EXPR object T.  */

static void
process_call_operands (tree t)
{
  bool side_effects = TREE_SIDE_EFFECTS (t);
  bool read_only = false;
  int i = call_expr_flags (t);

  /* Calls have side-effects, except those to const or pure functions.  */
  if ((i & ECF_LOOPING_CONST_OR_PURE) || !(i & (ECF_CONST | ECF_PURE)))
    side_effects = true;
  /* Propagate TREE_READONLY of arguments for const functions.  */
  if (i & ECF_CONST)
    read_only = true;

  if (!side_effects || read_only)
    /* Operand 0 is the function itself; the arguments start at 1.  */
    for (i = 1; i < TREE_OPERAND_LENGTH (t); i++)
      {
	tree op = TREE_OPERAND (t, i);
	if (op && TREE_SIDE_EFFECTS (op))
	  side_effects = true;
	if (op && !TREE_READONLY (op) && !CONSTANT_CLASS_P (op))
	  read_only = false;
      }

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_READONLY (t) = read_only;
}

/* Return true if EXP contains a PLACEHOLDER_EXPR, i.e. if it represents a
   size or offset that depends on a field within a record.
 */

bool
contains_placeholder_p (const_tree exp)
{
  enum tree_code code;

  if (!exp)
    return 0;

  code = TREE_CODE (exp);
  if (code == PLACEHOLDER_EXPR)
    return 1;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      /* Don't look at any PLACEHOLDER_EXPRs that might be in index or bit
	 position computations since they will be converted into a
	 WITH_RECORD_EXPR involving the reference, which will assume
	 here will be valid.  */
      return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));

    case tcc_exceptional:
      if (code == TREE_LIST)
	return (CONTAINS_PLACEHOLDER_P (TREE_VALUE (exp))
		|| CONTAINS_PLACEHOLDER_P (TREE_CHAIN (exp)));
      break;

    case tcc_unary:
    case tcc_binary:
    case tcc_comparison:
    case tcc_expression:
      switch (code)
	{
	case COMPOUND_EXPR:
	  /* Ignoring the first operand isn't quite right, but works best.  */
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1));

	case COND_EXPR:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 2)));

	case SAVE_EXPR:
	  /* The save_expr function never wraps anything containing
	     a PLACEHOLDER_EXPR.  */
	  return 0;

	default:
	  break;
	}

      /* Generic fallback: check each operand the code has.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  return CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0));
	case 2:
	  return (CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 0))
		  || CONTAINS_PLACEHOLDER_P (TREE_OPERAND (exp, 1)));
	default:
	  return 0;
	}

    case tcc_vl_exp:
      switch (code)
	{
	case CALL_EXPR:
	  {
	    const_tree arg;
	    const_call_expr_arg_iterator iter;
	    FOR_EACH_CONST_CALL_EXPR_ARG (arg, iter, exp)
	      if (CONTAINS_PLACEHOLDER_P (arg))
		return 1;
	    return 0;
	  }
	default:
	  return 0;
	}

    default:
      return 0;
    }
  return 0;
}

/* Return true if any part of the structure of TYPE involves a PLACEHOLDER_EXPR
   directly.  This includes size, bounds, qualifiers (for QUAL_UNION_TYPE) and
   field positions.  */

static bool
type_contains_placeholder_1 (const_tree type)
{
  /* If the size contains a placeholder or the parent type (component type in
     the case of arrays) type involves a placeholder, this type does.  */
  if (CONTAINS_PLACEHOLDER_P (TYPE_SIZE (type))
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE_UNIT (type))
      || (!POINTER_TYPE_P (type)
	  && TREE_TYPE (type)
	  && type_contains_placeholder_p (TREE_TYPE (type))))
    return true;

  /* Now do type-specific checks.  Note that the last part of the check above
     greatly limits what we have to do below.  */
  switch (TREE_CODE (type))
    {
    case VOID_TYPE:
    case POINTER_BOUNDS_TYPE:
    case COMPLEX_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case POINTER_TYPE:
    case OFFSET_TYPE:
    case REFERENCE_TYPE:
    case METHOD_TYPE:
    case FUNCTION_TYPE:
    case VECTOR_TYPE:
    case NULLPTR_TYPE:
      return false;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      /* Here we just check the bounds.  */
      return (CONTAINS_PLACEHOLDER_P (TYPE_MIN_VALUE (type))
	      || CONTAINS_PLACEHOLDER_P (TYPE_MAX_VALUE (type)));

    case ARRAY_TYPE:
      /* We have already checked the component type above, so just check the
	 domain type.  */
      return type_contains_placeholder_p (TYPE_DOMAIN (type));

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      {
	tree field;

	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	  if (TREE_CODE (field) == FIELD_DECL
	      && (CONTAINS_PLACEHOLDER_P (DECL_FIELD_OFFSET (field))
		  || (TREE_CODE (type) == QUAL_UNION_TYPE
		      && CONTAINS_PLACEHOLDER_P (DECL_QUALIFIER (field)))
		  || type_contains_placeholder_p (TREE_TYPE (field))))
	    return true;

	return false;
      }

    default:
      gcc_unreachable ();
    }
}

/* Wrapper around above function used to cache its result.  */

bool
type_contains_placeholder_p (tree type)
{
  bool result;

  /* If the contains_placeholder_bits field has been initialized,
     then we know the answer.  The cached field encodes the boolean
     answer plus one; zero means "not yet computed".  */
  if (TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) > 0)
    return TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) - 1;

  /* Indicate that we've seen this type node, and the answer is false.
     This is what we want to return if we run into recursion via fields.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = 1;

  /* Compute the real value.  */
  result = type_contains_placeholder_1 (type);

  /* Store the real value.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (type) = result + 1;

  return result;
}

/* Push tree EXP onto vector QUEUE if it is not already present.
   */

static void
push_without_duplicates (tree exp, vec<tree> *queue)
{
  unsigned int i;
  tree iter;

  /* Linear scan for an element structurally equal to EXP.  */
  FOR_EACH_VEC_ELT (*queue, i, iter)
    if (simple_cst_equal (iter, exp) == 1)
      break;

  /* No match found: ITER was left null by the iterator running off the
     end of the vector.  */
  if (!iter)
    queue->safe_push (exp);
}

/* Given a tree EXP, find all occurrences of references to fields
   in a PLACEHOLDER_EXPR and place them in vector REFS without
   duplicates.  Also record VAR_DECLs and CONST_DECLs.  Note that
   we assume here that EXP contains only arithmetic expressions
   or CALL_EXPRs with PLACEHOLDER_EXPRs occurring only in their
   argument list.  */

void
find_placeholder_in_expr (tree exp, vec<tree> *refs)
{
  enum tree_code code = TREE_CODE (exp);
  tree inner;
  int i;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      FIND_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), refs);
      FIND_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), refs);
    }
  else if (code == COMPONENT_REF)
    {
      /* Strip the chain of references down to its base object.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	push_without_duplicates (exp, refs);
      else
	FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), refs);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	break;

      case tcc_declaration:
	/* Variables allocated to static storage can stay.  */
	if (!TREE_STATIC (exp))
	  push_without_duplicates (exp, refs);
	break;

      case tcc_expression:
	/* This is the pattern built in ada/make_aligning_type.  */
	if (code == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (exp, 0)) == PLACEHOLDER_EXPR)
	  {
	    push_without_duplicates (exp, refs);
	    break;
	  }

	/* Fall through...  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	for (i = 0; i < TREE_CODE_LENGTH (code); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      case tcc_vl_exp:
	/* Start at operand 1: operand 0 of a vl_exp presumably holds the
	   operand count rather than a real operand -- confirm in tree.h.  */
	for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	  FIND_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, i), refs);
	break;

      default:
	gcc_unreachable ();
      }
}

/* Given a tree EXP, a FIELD_DECL F, and a replacement value R,
   return a tree with all occurrences of references to F in a
   PLACEHOLDER_EXPR replaced by R.  Also handle VAR_DECLs and
   CONST_DECLs.  Note that we assume here that EXP contains only
   arithmetic expressions or CALL_EXPRs with PLACEHOLDER_EXPRs
   occurring only in their argument list.  */

tree
substitute_in_expr (tree exp, tree f, tree r)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* We handle TREE_LIST and COMPONENT_REF separately.  */
  if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_IN_EXPR (TREE_CHAIN (exp), f, r);
      op1 = SUBSTITUTE_IN_EXPR (TREE_VALUE (exp), f, r);
      /* If nothing changed, share the original node.  */
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else if (code == COMPONENT_REF)
    {
      tree inner;

      /* If this expression is getting a value from a PLACEHOLDER_EXPR
	 and it is the right field, replace it with R.  */
      for (inner = TREE_OPERAND (exp, 0);
	   REFERENCE_CLASS_P (inner);
	   inner = TREE_OPERAND (inner, 0))
	;

      /* The field.  */
      op1 = TREE_OPERAND (exp, 1);

      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && op1 == f)
	return r;

      /* If this expression hasn't been completed yet, leave it alone.  */
      if (TREE_CODE (inner) == PLACEHOLDER_EXPR && !TREE_TYPE (inner))
	return exp;

      op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
      if (op0 == TREE_OPERAND (exp, 0))
	return exp;

      new_tree
	= fold_build3 (COMPONENT_REF, TREE_TYPE (exp), op0, op1, NULL_TREE);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
	return exp;

      case tcc_declaration:
	if (exp == f)
	  return r;
	else
	  return exp;

      case tcc_expression:
	if (exp == f)
	  return r;

	/* Fall through...  */

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_reference:
	/* Substitute in each operand; return the original node whenever
	   no operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 0), f, r);
	    op1 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 1), f, r);
	    op2 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 2), f, r);
	    op3 = SUBSTITUTE_IN_EXPR (TREE_OPERAND (exp, 3), f, r);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* If we are trying to replace F with a constant, inline back
	     functions which do nothing else than computing a value from
	     the arguments they are passed.  This makes it possible to
	     fold partially or entirely the replacement expression.  */
	  if (CONSTANT_CLASS_P (r) && code == CALL_EXPR)
	    {
	      tree t = maybe_inline_call_in_expr (exp);
	      if (t)
		return SUBSTITUTE_IN_EXPR (t, f, r);
	    }

	  /* Copy the node lazily: only when some operand changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_IN_EXPR (op, f, r);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap property on dereference-like nodes.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}

/* Similar, but look for a PLACEHOLDER_EXPR in EXP and find a replacement
   for it within OBJ, a tree that is an object or a chain of references.
   */

tree
substitute_placeholder_in_expr (tree exp, tree obj)
{
  enum tree_code code = TREE_CODE (exp);
  tree op0, op1, op2, op3;
  tree new_tree;

  /* If this is a PLACEHOLDER_EXPR, see if we find a corresponding type
     in the chain of OBJ.  */
  if (code == PLACEHOLDER_EXPR)
    {
      tree need_type = TYPE_MAIN_VARIANT (TREE_TYPE (exp));
      tree elt;

      /* First pass: walk down the chain of OBJ looking for an element
	 whose main variant type matches NEED_TYPE directly.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
	  return elt;

      /* Second pass: also accept a pointer to the needed type and
	 return a dereference of it.  */
      for (elt = obj; elt != 0;
	   elt = ((TREE_CODE (elt) == COMPOUND_EXPR
		   || TREE_CODE (elt) == COND_EXPR)
		  ? TREE_OPERAND (elt, 1)
		  : (REFERENCE_CLASS_P (elt)
		     || UNARY_CLASS_P (elt)
		     || BINARY_CLASS_P (elt)
		     || VL_EXP_CLASS_P (elt)
		     || EXPRESSION_CLASS_P (elt))
		  ? TREE_OPERAND (elt, 0) : 0))
	if (POINTER_TYPE_P (TREE_TYPE (elt))
	    && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		== need_type))
	  return fold_build1 (INDIRECT_REF, need_type, elt);

      /* If we didn't find it, return the original PLACEHOLDER_EXPR.  If it
	 survives until RTL generation, there will be an error.  */
      return exp;
    }

  /* TREE_LIST is special because we need to look at TREE_VALUE
     and TREE_CHAIN, not TREE_OPERANDS.  */
  else if (code == TREE_LIST)
    {
      op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_CHAIN (exp), obj);
      op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_VALUE (exp), obj);
      if (op0 == TREE_CHAIN (exp) && op1 == TREE_VALUE (exp))
	return exp;

      return tree_cons (TREE_PURPOSE (exp), op1, op0);
    }
  else
    switch (TREE_CODE_CLASS (code))
      {
      case tcc_constant:
      case tcc_declaration:
	return exp;

      case tcc_exceptional:
      case tcc_unary:
      case tcc_binary:
      case tcc_comparison:
      case tcc_expression:
      case tcc_reference:
      case tcc_statement:
	/* Substitute in each operand; share the original node whenever
	   no operand actually changed.  */
	switch (TREE_CODE_LENGTH (code))
	  {
	  case 0:
	    return exp;

	  case 1:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    if (op0 == TREE_OPERAND (exp, 0))
	      return exp;

	    new_tree = fold_build1 (code, TREE_TYPE (exp), op0);
	    break;

	  case 2:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1))
	      return exp;

	    new_tree = fold_build2 (code, TREE_TYPE (exp), op0, op1);
	    break;

	  case 3:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2))
	      return exp;

	    new_tree = fold_build3 (code, TREE_TYPE (exp), op0, op1, op2);
	    break;

	  case 4:
	    op0 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 0), obj);
	    op1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 1), obj);
	    op2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 2), obj);
	    op3 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TREE_OPERAND (exp, 3),
						  obj);

	    if (op0 == TREE_OPERAND (exp, 0) && op1 == TREE_OPERAND (exp, 1)
		&& op2 == TREE_OPERAND (exp, 2)
		&& op3 == TREE_OPERAND (exp, 3))
	      return exp;

	    new_tree
	      = fold (build4 (code, TREE_TYPE (exp), op0, op1, op2, op3));
	    break;

	  default:
	    gcc_unreachable ();
	  }
	break;

      case tcc_vl_exp:
	{
	  int i;

	  new_tree = NULL_TREE;

	  /* Copy the node lazily: only when some operand changes.  */
	  for (i = 1; i < TREE_OPERAND_LENGTH (exp); i++)
	    {
	      tree op = TREE_OPERAND (exp, i);
	      tree new_op = SUBSTITUTE_PLACEHOLDER_IN_EXPR (op, obj);
	      if (new_op != op)
		{
		  if (!new_tree)
		    new_tree = copy_node (exp);
		  TREE_OPERAND (new_tree, i) = new_op;
		}
	    }

	  if (new_tree)
	    {
	      new_tree = fold (new_tree);
	      if (TREE_CODE (new_tree) == CALL_EXPR)
		process_call_operands (new_tree);
	    }
	  else
	    return exp;
	}
	break;

      default:
	gcc_unreachable ();
      }

  TREE_READONLY (new_tree) |= TREE_READONLY (exp);

  /* Preserve the no-trap property on dereference-like nodes.  */
  if (code == INDIRECT_REF || code == ARRAY_REF || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (new_tree) |= TREE_THIS_NOTRAP (exp);

  return new_tree;
}


/* Subroutine of stabilize_reference; this is called for subtrees of
   references.  Any expression with side-effects must be put in a SAVE_EXPR
   to ensure that it is only evaluated once.

   We don't put SAVE_EXPR nodes around everything, because assigning very
   simple expressions to temporaries causes us to miss good opportunities
   for optimizations.  Among other things, the opportunity to fold in the
   addition of a constant into an addressing mode often gets lost, e.g.
   "y[i+1] += x;".
   In general, we take the approach that we should not make
   an assignment unless we are forced into it - i.e., that any non-side effect
   operator should be allowed, and that cse should take care of coalescing
   multiple utterances of the same expression should that prove fruitful.  */

static tree
stabilize_reference_1 (tree e)
{
  tree result;
  enum tree_code code = TREE_CODE (e);

  /* We cannot ignore const expressions because it might be a reference
     to a const array but whose index contains side-effects.  But we can
     ignore things that are actual constant or that already have been
     handled by this function.  */

  if (tree_invariant_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_type:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_statement:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
	 so that it will only be evaluated once.  */
      /* The reference (r) and comparison (<) classes could be handled as
	 below, but it is generally faster to only evaluate them once.  */
      if (TREE_SIDE_EFFECTS (e))
	return save_expr (e);
      return e;

    case tcc_constant:
      /* Constants need no processing.  In fact, we should never reach
	 here.  */
      return e;

    case tcc_binary:
      /* Division is slow and tends to be compiled with jumps,
	 especially the division by powers of 2 that is often
	 found inside of an array reference.  So do it just once.  */
      if (code == TRUNC_DIV_EXPR || code == TRUNC_MOD_EXPR
	  || code == FLOOR_DIV_EXPR || code == FLOOR_MOD_EXPR
	  || code == CEIL_DIV_EXPR || code == CEIL_MOD_EXPR
	  || code == ROUND_DIV_EXPR || code == ROUND_MOD_EXPR)
	return save_expr (e);
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)),
			 stabilize_reference_1 (TREE_OPERAND (e, 1)));
      break;

    case tcc_unary:
      /* Recursively stabilize each operand.  */
      result = build_nt (code, stabilize_reference_1 (TREE_OPERAND (e, 0)));
      break;

    default:
      gcc_unreachable ();
    }

  /* build_nt does not set type or flags; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (e);
  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}

/* Stabilize a reference so that we can use it any number of times
   without causing its operands to be evaluated more than once.
   Returns the stabilized reference.  This works by means of save_expr,
   so see the caveats in the comments about save_expr.

   Also allows conversion expressions whose operands are references.
   Any other kind of expression is returned unchanged.  */

tree
stabilize_reference (tree ref)
{
  tree result;
  enum tree_code code = TREE_CODE (ref);

  switch (code)
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      result = build_nt (code, stabilize_reference (TREE_OPERAND (ref, 0)));
      break;

    case INDIRECT_REF:
      result = build_nt (INDIRECT_REF,
			 stabilize_reference_1 (TREE_OPERAND (ref, 0)));
      break;

    case COMPONENT_REF:
      result = build_nt (COMPONENT_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build_nt (BIT_FIELD_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      break;

    case ARRAY_REF:
      /* The base is stabilized as a reference, the index as a value.  */
      result = build_nt (ARRAY_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case ARRAY_RANGE_REF:
      result = build_nt (ARRAY_RANGE_REF,
			 stabilize_reference (TREE_OPERAND (ref, 0)),
			 stabilize_reference_1 (TREE_OPERAND (ref, 1)),
			 TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      /* We cannot wrap the first expression in a SAVE_EXPR, as then
	 it wouldn't be ignored.  This matters when dealing with
	 volatiles.  */
      return stabilize_reference_1 (ref);

      /* If arg isn't a kind of lvalue we recognize, make no change.
	 Caller should recognize the error for an invalid lvalue.  */
    default:
      return ref;

    case ERROR_MARK:
      return error_mark_node;
    }

  /* build_nt does not set type or flags; copy them from the original.  */
  TREE_TYPE (result) = TREE_TYPE (ref);
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  return result;
}

/* Low-level constructors for expressions.  */

/* A helper function for build1 and constant folders.
   Set TREE_CONSTANT,
   and TREE_SIDE_EFFECTS for an ADDR_EXPR.  */

void
recompute_tree_invariant_for_addr_expr (tree t)
{
  tree node;
  bool tc = true, se = false;

  /* We started out assuming this address is both invariant and constant, but
     does not have side effects.  Now go down any handled components and see if
     any of them involve offsets that are either non-constant or non-invariant.
     Also check for side-effects.

     ??? Note that this code makes no attempt to deal with the case where
     taking the address of something causes a copy due to misalignment.  */

/* Clear TC / set SE according to the constancy and side effects of NODE.  */
#define UPDATE_FLAGS(NODE)  \
do { tree _node = (NODE); \
     if (_node && !TREE_CONSTANT (_node)) tc = false; \
     if (_node && TREE_SIDE_EFFECTS (_node)) se = true; } while (0)

  for (node = TREE_OPERAND (t, 0); handled_component_p (node);
       node = TREE_OPERAND (node, 0))
    {
      /* If the first operand doesn't have an ARRAY_TYPE, this is a bogus
	 array reference (probably made temporarily by the G++ front end),
	 so ignore all the operands.  */
      if ((TREE_CODE (node) == ARRAY_REF
	   || TREE_CODE (node) == ARRAY_RANGE_REF)
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (node, 0))) == ARRAY_TYPE)
	{
	  UPDATE_FLAGS (TREE_OPERAND (node, 1));
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	  if (TREE_OPERAND (node, 3))
	    UPDATE_FLAGS (TREE_OPERAND (node, 3));
	}
      /* Likewise, just because this is a COMPONENT_REF doesn't mean we have a
	 FIELD_DECL, apparently.  The G++ front end can put something else
	 there, at least temporarily.  */
      else if (TREE_CODE (node) == COMPONENT_REF
	       && TREE_CODE (TREE_OPERAND (node, 1)) == FIELD_DECL)
	{
	  if (TREE_OPERAND (node, 2))
	    UPDATE_FLAGS (TREE_OPERAND (node, 2));
	}
    }

  /* Let the language turn the innermost node into a decl if it wants;
     it may also adjust TC and SE directly.  */
  node = lang_hooks.expr_to_decl (node, &tc, &se);

  /* Now see what's inside.  If it's an INDIRECT_REF, copy our properties from
     the address, since &(*a)->b is a form of addition.  If it's a constant, the
     address is constant too.  If it's a decl, its address is constant if the
     decl is static.  Everything else is not constant and, furthermore,
     taking the address of a volatile variable is not volatile.  */
  if (TREE_CODE (node) == INDIRECT_REF
      || TREE_CODE (node) == MEM_REF)
    UPDATE_FLAGS (TREE_OPERAND (node, 0));
  else if (CONSTANT_CLASS_P (node))
    ;
  else if (DECL_P (node))
    tc &= (staticp (node) != NULL_TREE);
  else
    {
      tc = false;
      se |= TREE_SIDE_EFFECTS (node);
    }


  TREE_CONSTANT (t) = tc;
  TREE_SIDE_EFFECTS (t) = se;
#undef UPDATE_FLAGS
}

/* Build an expression of code CODE, data type TYPE, and operands as
   specified.  Expressions and reference nodes can be created this way.
   Constants, decls, types and misc nodes cannot be.

   We define six non-variadic functions, build0 through build5, taking
   from 0 to 5 operands.  This is enough for all extant tree codes.
   */

/* Build a zero-operand node of code CODE with type TT.  */

tree
build0_stat (enum tree_code code, tree tt MEM_STAT_DECL)
{
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 0);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  return t;
}

/* Build a one-operand node of code CODE with type TYPE and operand NODE.
   The allocation is open-coded (a tree_exp directly) instead of going
   through make_node.  */

tree
build1_stat (enum tree_code code, tree type, tree node MEM_STAT_DECL)
{
  int length = sizeof (struct tree_exp);
  tree t;

  record_node_allocation_statistics (code, length);

  gcc_assert (TREE_CODE_LENGTH (code) == 1);

  t = ggc_alloc_tree_node_stat (length PASS_MEM_STAT);

  /* Only the common part is cleared; everything else is set below.  */
  memset (t, 0, sizeof (struct tree_common));

  TREE_SET_CODE (t, code);

  TREE_TYPE (t) = type;
  SET_EXPR_LOCATION (t, UNKNOWN_LOCATION);
  TREE_OPERAND (t, 0) = node;
  if (node && !TYPE_P (node))
    {
      TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (node);
      TREE_READONLY (t) = TREE_READONLY (node);
    }

  if (TREE_CODE_CLASS (code) == tcc_statement)
    TREE_SIDE_EFFECTS (t) = 1;
  else switch (code)
    {
    case VA_ARG_EXPR:
      /* All of these have side-effects, no matter what their
	 operands are.  */
      TREE_SIDE_EFFECTS (t) = 1;
      TREE_READONLY (t) = 0;
      break;

    case INDIRECT_REF:
      /* Whether a dereference is readonly has nothing to do with whether
	 its operand is readonly.  */
      TREE_READONLY (t) = 0;
      break;

    case ADDR_EXPR:
      if (node)
	recompute_tree_invariant_for_addr_expr (t);
      break;

    default:
      if ((TREE_CODE_CLASS (code) == tcc_unary || code == VIEW_CONVERT_EXPR)
	  && node && !TYPE_P (node)
	  && TREE_CONSTANT (node))
	TREE_CONSTANT (t) = 1;
      if (TREE_CODE_CLASS (code) == tcc_reference
	  && node && TREE_THIS_VOLATILE (node))
	TREE_THIS_VOLATILE (t) = 1;
      break;
    }

  return t;
}

/* Helper for build2 through build5: store operand N of T and fold the
   argument's flags into the local side_effects / read_only / constant
   accumulators.  Type operands are ignored for flag purposes.  */
#define PROCESS_ARG(N)				\
  do {						\
    TREE_OPERAND (t, N) = arg##N;		\
    if (arg##N &&!TYPE_P (arg##N))		\
      {						\
	if (TREE_SIDE_EFFECTS (arg##N))		\
	  side_effects = 1;			\
	if (!TREE_READONLY (arg##N)		\
	    && !CONSTANT_CLASS_P (arg##N))	\
	  (void) (read_only = 0);		\
	if (!TREE_CONSTANT (arg##N))		\
	  (void) (constant = 0);		\
      }						\
  } while (0)

tree
build2_stat (enum tree_code code, tree tt, tree arg0, tree arg1 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 2);

  if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
      && arg0 && arg1 && tt && POINTER_TYPE_P (tt)
      /* When sizetype precision doesn't match that of pointers
	 we need to be able to build explicit extensions or truncations
	 of the offset argument.  */
      && TYPE_PRECISION (sizetype) == TYPE_PRECISION (tt))
    gcc_assert (TREE_CODE (arg0) == INTEGER_CST
		&& TREE_CODE (arg1) == INTEGER_CST);

  if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
    gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
		&& ptrofftype_p (TREE_TYPE (arg1)));

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  /* Below, we automatically set TREE_SIDE_EFFECTS and TREE_READONLY for the
     result based on those same flags for the arguments.  But if the
     arguments aren't really even `tree' expressions, we shouldn't be trying
     to do this.  */

  /* Expressions without side effects may be constant if their
     arguments are as well.  */
  constant = (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_binary);
  read_only = 1;
  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == MEM_REF)
    {
      /* A MEM_REF of &OBJECT inherits the readonly/volatile properties
	 of OBJECT itself, not of the operands.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    {
      TREE_READONLY (t) = read_only;
      TREE_CONSTANT (t) = constant;
      TREE_THIS_VOLATILE (t)
	= (TREE_CODE_CLASS (code) == tcc_reference
	   && arg0 && TREE_THIS_VOLATILE (arg0));
    }

  return t;
}


tree
build3_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 3);
  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  read_only = 1;

  /* As a special exception, if COND_EXPR has NULL branches, we
     assume that it is a gimple statement and always consider
     it to have side effects.  */
  if (code == COND_EXPR
      && tt == void_type_node
      && arg1 == NULL_TREE
      && arg2 == NULL_TREE)
    side_effects = true;
  else
    side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);

  if (code == COND_EXPR)
    TREE_READONLY (t) = read_only;

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

tree
build4_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2, tree arg3 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 4);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);

  TREE_SIDE_EFFECTS (t) = side_effects;
  TREE_THIS_VOLATILE (t)
    = (TREE_CODE_CLASS (code) == tcc_reference
       && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

tree
build5_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
	     tree arg2, tree arg3, tree arg4 MEM_STAT_DECL)
{
  bool constant, read_only, side_effects;
  tree t;

  gcc_assert (TREE_CODE_LENGTH (code) == 5);

  t = make_node_stat (code PASS_MEM_STAT);
  TREE_TYPE (t) = tt;

  side_effects = TREE_SIDE_EFFECTS (t);

  PROCESS_ARG (0);
  PROCESS_ARG (1);
  PROCESS_ARG (2);
  PROCESS_ARG (3);
  PROCESS_ARG (4);

  TREE_SIDE_EFFECTS (t) = side_effects;
  if (code == TARGET_MEM_REF)
    {
      /* Like MEM_REF: inherit readonly/volatile from the object whose
	 address is taken.  */
      if (arg0 && TREE_CODE (arg0) == ADDR_EXPR)
	{
	  tree o = TREE_OPERAND (arg0, 0);
	  TREE_READONLY (t) = TREE_READONLY (o);
	  TREE_THIS_VOLATILE (t) = TREE_THIS_VOLATILE (o);
	}
    }
  else
    TREE_THIS_VOLATILE (t)
      = (TREE_CODE_CLASS (code) == tcc_reference
	 && arg0 && TREE_THIS_VOLATILE (arg0));

  return t;
}

/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
   on the pointer PTR.  */

tree
build_simple_mem_ref_loc (location_t loc, tree ptr)
{
  HOST_WIDE_INT offset = 0;
  tree ptype = TREE_TYPE (ptr);
  tree tem;
  /* For convenience allow addresses that collapse to a simple base
     and offset.  */
  if (TREE_CODE (ptr) == ADDR_EXPR
      && (handled_component_p (TREE_OPERAND (ptr, 0))
	  || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
    {
      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
      gcc_assert (ptr);
      ptr = build_fold_addr_expr (ptr);
      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
    }
  tem = build2 (MEM_REF, TREE_TYPE (ptype),
		ptr, build_int_cst (ptype, offset));
  SET_EXPR_LOCATION (tem, loc);
  return tem;
}

/* Return the constant offset of a MEM_REF or TARGET_MEM_REF tree T.  */

offset_int
mem_ref_offset (const_tree t)
{
  return offset_int::from (TREE_OPERAND (t, 1), SIGNED);
}

/* Return an invariant ADDR_EXPR of type TYPE taking the address of BASE
   offsetted by OFFSET units.  */

tree
build_invariant_address (tree type, tree base, HOST_WIDE_INT offset)
{
  tree ref = fold_build2 (MEM_REF, TREE_TYPE (type),
			  build_fold_addr_expr (base),
			  build_int_cst (ptr_type_node, offset));
  tree addr = build1 (ADDR_EXPR, type, ref);
  recompute_tree_invariant_for_addr_expr (addr);
  return addr;
}

/* Similar except don't specify the TREE_TYPE
   and leave the TREE_SIDE_EFFECTS as 0.
   It is permissible for arguments to be null,
   or even garbage if their values do not matter.  */

tree
build_nt (enum tree_code code, ...)
{
  tree t;
  int length;
  int i;
  va_list p;

  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);

  va_start (p, code);

  t = make_node (code);
  length = TREE_CODE_LENGTH (code);

  for (i = 0; i < length; i++)
    TREE_OPERAND (t, i) = va_arg (p, tree);

  va_end (p);
  return t;
}

/* Similar to build_nt, but for creating a CALL_EXPR object with a
   tree vec.  */

tree
build_nt_call_vec (tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  /* + 3 for the operand count, the callee and the static chain slots.  */
  ret = build_vl_exp (CALL_EXPR, vec_safe_length (args) + 3);
  CALL_EXPR_FN (ret) = fn;
  CALL_EXPR_STATIC_CHAIN (ret) = NULL_TREE;
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  return ret;
}

/* Create a DECL_... node of code CODE, name NAME and data type TYPE.
   We do NOT enter this node in any sort of symbol table.

   LOC is the location of the decl.

   layout_decl is used to set up the decl's storage layout.
   Other slots are initialized to 0 or null pointers.  */

tree
build_decl_stat (location_t loc, enum tree_code code, tree name,
		 tree type MEM_STAT_DECL)
{
  tree t;

  t = make_node_stat (code PASS_MEM_STAT);
  DECL_SOURCE_LOCATION (t) = loc;

/*  if (type == error_mark_node)
    type = integer_type_node; */
/* That is not done, deliberately, so that having error_mark_node
   as the type can suppress useless errors in the use of this variable.  */

  DECL_NAME (t) = name;
  TREE_TYPE (t) = type;

  if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
    layout_decl (t, 0);

  return t;
}

/* Builds and returns function declaration with NAME and TYPE.
*/ 4655 4656tree 4657build_fn_decl (const char *name, tree type) 4658{ 4659 tree id = get_identifier (name); 4660 tree decl = build_decl (input_location, FUNCTION_DECL, id, type); 4661 4662 DECL_EXTERNAL (decl) = 1; 4663 TREE_PUBLIC (decl) = 1; 4664 DECL_ARTIFICIAL (decl) = 1; 4665 TREE_NOTHROW (decl) = 1; 4666 4667 return decl; 4668} 4669 4670vec<tree, va_gc> *all_translation_units; 4671 4672/* Builds a new translation-unit decl with name NAME, queues it in the 4673 global list of translation-unit decls and returns it. */ 4674 4675tree 4676build_translation_unit_decl (tree name) 4677{ 4678 tree tu = build_decl (UNKNOWN_LOCATION, TRANSLATION_UNIT_DECL, 4679 name, NULL_TREE); 4680 TRANSLATION_UNIT_LANGUAGE (tu) = lang_hooks.name; 4681 vec_safe_push (all_translation_units, tu); 4682 return tu; 4683} 4684 4685 4686/* BLOCK nodes are used to represent the structure of binding contours 4687 and declarations, once those contours have been exited and their contents 4688 compiled. This information is used for outputting debugging info. */ 4689 4690tree 4691build_block (tree vars, tree subblocks, tree supercontext, tree chain) 4692{ 4693 tree block = make_node (BLOCK); 4694 4695 BLOCK_VARS (block) = vars; 4696 BLOCK_SUBBLOCKS (block) = subblocks; 4697 BLOCK_SUPERCONTEXT (block) = supercontext; 4698 BLOCK_CHAIN (block) = chain; 4699 return block; 4700} 4701 4702 4703/* Like SET_EXPR_LOCATION, but make sure the tree can have a location. 4704 4705 LOC is the location to use in tree T. */ 4706 4707void 4708protected_set_expr_location (tree t, location_t loc) 4709{ 4710 if (CAN_HAVE_LOCATION_P (t)) 4711 SET_EXPR_LOCATION (t, loc); 4712} 4713 4714/* Return a declaration like DDECL except that its DECL_ATTRIBUTES 4715 is ATTRIBUTE. 
*/ 4716 4717tree 4718build_decl_attribute_variant (tree ddecl, tree attribute) 4719{ 4720 DECL_ATTRIBUTES (ddecl) = attribute; 4721 return ddecl; 4722} 4723 4724/* Return a type like TTYPE except that its TYPE_ATTRIBUTE 4725 is ATTRIBUTE and its qualifiers are QUALS. 4726 4727 Record such modified types already made so we don't make duplicates. */ 4728 4729tree 4730build_type_attribute_qual_variant (tree ttype, tree attribute, int quals) 4731{ 4732 if (! attribute_list_equal (TYPE_ATTRIBUTES (ttype), attribute)) 4733 { 4734 inchash::hash hstate; 4735 tree ntype; 4736 int i; 4737 tree t; 4738 enum tree_code code = TREE_CODE (ttype); 4739 4740 /* Building a distinct copy of a tagged type is inappropriate; it 4741 causes breakage in code that expects there to be a one-to-one 4742 relationship between a struct and its fields. 4743 build_duplicate_type is another solution (as used in 4744 handle_transparent_union_attribute), but that doesn't play well 4745 with the stronger C++ type identity model. 
*/ 4746 if (TREE_CODE (ttype) == RECORD_TYPE 4747 || TREE_CODE (ttype) == UNION_TYPE 4748 || TREE_CODE (ttype) == QUAL_UNION_TYPE 4749 || TREE_CODE (ttype) == ENUMERAL_TYPE) 4750 { 4751 warning (OPT_Wattributes, 4752 "ignoring attributes applied to %qT after definition", 4753 TYPE_MAIN_VARIANT (ttype)); 4754 return build_qualified_type (ttype, quals); 4755 } 4756 4757 ttype = build_qualified_type (ttype, TYPE_UNQUALIFIED); 4758 ntype = build_distinct_type_copy (ttype); 4759 4760 TYPE_ATTRIBUTES (ntype) = attribute; 4761 4762 hstate.add_int (code); 4763 if (TREE_TYPE (ntype)) 4764 hstate.add_object (TYPE_HASH (TREE_TYPE (ntype))); 4765 attribute_hash_list (attribute, hstate); 4766 4767 switch (TREE_CODE (ntype)) 4768 { 4769 case FUNCTION_TYPE: 4770 type_hash_list (TYPE_ARG_TYPES (ntype), hstate); 4771 break; 4772 case ARRAY_TYPE: 4773 if (TYPE_DOMAIN (ntype)) 4774 hstate.add_object (TYPE_HASH (TYPE_DOMAIN (ntype))); 4775 break; 4776 case INTEGER_TYPE: 4777 t = TYPE_MAX_VALUE (ntype); 4778 for (i = 0; i < TREE_INT_CST_NUNITS (t); i++) 4779 hstate.add_object (TREE_INT_CST_ELT (t, i)); 4780 break; 4781 case REAL_TYPE: 4782 case FIXED_POINT_TYPE: 4783 { 4784 unsigned int precision = TYPE_PRECISION (ntype); 4785 hstate.add_object (precision); 4786 } 4787 break; 4788 default: 4789 break; 4790 } 4791 4792 ntype = type_hash_canon (hstate.end(), ntype); 4793 4794 /* If the target-dependent attributes make NTYPE different from 4795 its canonical type, we will need to use structural equality 4796 checks for this type. 
*/ 4797 if (TYPE_STRUCTURAL_EQUALITY_P (ttype) 4798 || !comp_type_attributes (ntype, ttype)) 4799 SET_TYPE_STRUCTURAL_EQUALITY (ntype); 4800 else if (TYPE_CANONICAL (ntype) == ntype) 4801 TYPE_CANONICAL (ntype) = TYPE_CANONICAL (ttype); 4802 4803 ttype = build_qualified_type (ntype, quals); 4804 } 4805 else if (TYPE_QUALS (ttype) != quals) 4806 ttype = build_qualified_type (ttype, quals); 4807 4808 return ttype; 4809} 4810 4811/* Check if "omp declare simd" attribute arguments, CLAUSES1 and CLAUSES2, are 4812 the same. */ 4813 4814static bool 4815omp_declare_simd_clauses_equal (tree clauses1, tree clauses2) 4816{ 4817 tree cl1, cl2; 4818 for (cl1 = clauses1, cl2 = clauses2; 4819 cl1 && cl2; 4820 cl1 = OMP_CLAUSE_CHAIN (cl1), cl2 = OMP_CLAUSE_CHAIN (cl2)) 4821 { 4822 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_CODE (cl2)) 4823 return false; 4824 if (OMP_CLAUSE_CODE (cl1) != OMP_CLAUSE_SIMDLEN) 4825 { 4826 if (simple_cst_equal (OMP_CLAUSE_DECL (cl1), 4827 OMP_CLAUSE_DECL (cl2)) != 1) 4828 return false; 4829 } 4830 switch (OMP_CLAUSE_CODE (cl1)) 4831 { 4832 case OMP_CLAUSE_ALIGNED: 4833 if (simple_cst_equal (OMP_CLAUSE_ALIGNED_ALIGNMENT (cl1), 4834 OMP_CLAUSE_ALIGNED_ALIGNMENT (cl2)) != 1) 4835 return false; 4836 break; 4837 case OMP_CLAUSE_LINEAR: 4838 if (simple_cst_equal (OMP_CLAUSE_LINEAR_STEP (cl1), 4839 OMP_CLAUSE_LINEAR_STEP (cl2)) != 1) 4840 return false; 4841 break; 4842 case OMP_CLAUSE_SIMDLEN: 4843 if (simple_cst_equal (OMP_CLAUSE_SIMDLEN_EXPR (cl1), 4844 OMP_CLAUSE_SIMDLEN_EXPR (cl2)) != 1) 4845 return false; 4846 default: 4847 break; 4848 } 4849 } 4850 return true; 4851} 4852 4853/* Compare two constructor-element-type constants. Return 1 if the lists 4854 are known to be equal; otherwise return 0. 
*/ 4855 4856static bool 4857simple_cst_list_equal (const_tree l1, const_tree l2) 4858{ 4859 while (l1 != NULL_TREE && l2 != NULL_TREE) 4860 { 4861 if (simple_cst_equal (TREE_VALUE (l1), TREE_VALUE (l2)) != 1) 4862 return false; 4863 4864 l1 = TREE_CHAIN (l1); 4865 l2 = TREE_CHAIN (l2); 4866 } 4867 4868 return l1 == l2; 4869} 4870 4871/* Compare two attributes for their value identity. Return true if the 4872 attribute values are known to be equal; otherwise return false. 4873*/ 4874 4875static bool 4876attribute_value_equal (const_tree attr1, const_tree attr2) 4877{ 4878 if (TREE_VALUE (attr1) == TREE_VALUE (attr2)) 4879 return true; 4880 4881 if (TREE_VALUE (attr1) != NULL_TREE 4882 && TREE_CODE (TREE_VALUE (attr1)) == TREE_LIST 4883 && TREE_VALUE (attr2) != NULL 4884 && TREE_CODE (TREE_VALUE (attr2)) == TREE_LIST) 4885 return (simple_cst_list_equal (TREE_VALUE (attr1), 4886 TREE_VALUE (attr2)) == 1); 4887 4888 if ((flag_openmp || flag_openmp_simd) 4889 && TREE_VALUE (attr1) && TREE_VALUE (attr2) 4890 && TREE_CODE (TREE_VALUE (attr1)) == OMP_CLAUSE 4891 && TREE_CODE (TREE_VALUE (attr2)) == OMP_CLAUSE) 4892 return omp_declare_simd_clauses_equal (TREE_VALUE (attr1), 4893 TREE_VALUE (attr2)); 4894 4895 return (simple_cst_equal (TREE_VALUE (attr1), TREE_VALUE (attr2)) == 1); 4896} 4897 4898/* Return 0 if the attributes for two types are incompatible, 1 if they 4899 are compatible, and 2 if they are nearly compatible (which causes a 4900 warning to be generated). 
*/ 4901int 4902comp_type_attributes (const_tree type1, const_tree type2) 4903{ 4904 const_tree a1 = TYPE_ATTRIBUTES (type1); 4905 const_tree a2 = TYPE_ATTRIBUTES (type2); 4906 const_tree a; 4907 4908 if (a1 == a2) 4909 return 1; 4910 for (a = a1; a != NULL_TREE; a = TREE_CHAIN (a)) 4911 { 4912 const struct attribute_spec *as; 4913 const_tree attr; 4914 4915 as = lookup_attribute_spec (get_attribute_name (a)); 4916 if (!as || as->affects_type_identity == false) 4917 continue; 4918 4919 attr = lookup_attribute (as->name, CONST_CAST_TREE (a2)); 4920 if (!attr || !attribute_value_equal (a, attr)) 4921 break; 4922 } 4923 if (!a) 4924 { 4925 for (a = a2; a != NULL_TREE; a = TREE_CHAIN (a)) 4926 { 4927 const struct attribute_spec *as; 4928 4929 as = lookup_attribute_spec (get_attribute_name (a)); 4930 if (!as || as->affects_type_identity == false) 4931 continue; 4932 4933 if (!lookup_attribute (as->name, CONST_CAST_TREE (a1))) 4934 break; 4935 /* We don't need to compare trees again, as we did this 4936 already in first loop. */ 4937 } 4938 /* All types - affecting identity - are equal, so 4939 there is no need to call target hook for comparison. */ 4940 if (!a) 4941 return 1; 4942 } 4943 /* As some type combinations - like default calling-convention - might 4944 be compatible, we have to call the target hook to get the final result. */ 4945 return targetm.comp_type_attributes (type1, type2); 4946} 4947 4948/* Return a type like TTYPE except that its TYPE_ATTRIBUTE 4949 is ATTRIBUTE. 4950 4951 Record such modified types already made so we don't make duplicates. */ 4952 4953tree 4954build_type_attribute_variant (tree ttype, tree attribute) 4955{ 4956 return build_type_attribute_qual_variant (ttype, attribute, 4957 TYPE_QUALS (ttype)); 4958} 4959 4960 4961/* Reset the expression *EXPR_P, a size or position. 4962 4963 ??? We could reset all non-constant sizes or positions. But it's cheap 4964 enough to not do so and refrain from adding workarounds to dwarf2out.c. 
4965 4966 We need to reset self-referential sizes or positions because they cannot 4967 be gimplified and thus can contain a CALL_EXPR after the gimplification 4968 is finished, which will run afoul of LTO streaming. And they need to be 4969 reset to something essentially dummy but not constant, so as to preserve 4970 the properties of the object they are attached to. */ 4971 4972static inline void 4973free_lang_data_in_one_sizepos (tree *expr_p) 4974{ 4975 tree expr = *expr_p; 4976 if (CONTAINS_PLACEHOLDER_P (expr)) 4977 *expr_p = build0 (PLACEHOLDER_EXPR, TREE_TYPE (expr)); 4978} 4979 4980 4981/* Reset all the fields in a binfo node BINFO. We only keep 4982 BINFO_VTABLE, which is used by gimple_fold_obj_type_ref. */ 4983 4984static void 4985free_lang_data_in_binfo (tree binfo) 4986{ 4987 unsigned i; 4988 tree t; 4989 4990 gcc_assert (TREE_CODE (binfo) == TREE_BINFO); 4991 4992 BINFO_VIRTUALS (binfo) = NULL_TREE; 4993 BINFO_BASE_ACCESSES (binfo) = NULL; 4994 BINFO_INHERITANCE_CHAIN (binfo) = NULL_TREE; 4995 BINFO_SUBVTT_INDEX (binfo) = NULL_TREE; 4996 4997 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (binfo), i, t) 4998 free_lang_data_in_binfo (t); 4999} 5000 5001 5002/* Reset all language specific information still present in TYPE. */ 5003 5004static void 5005free_lang_data_in_type (tree type) 5006{ 5007 gcc_assert (TYPE_P (type)); 5008 5009 /* Give the FE a chance to remove its own data first. */ 5010 lang_hooks.free_lang_data (type); 5011 5012 TREE_LANG_FLAG_0 (type) = 0; 5013 TREE_LANG_FLAG_1 (type) = 0; 5014 TREE_LANG_FLAG_2 (type) = 0; 5015 TREE_LANG_FLAG_3 (type) = 0; 5016 TREE_LANG_FLAG_4 (type) = 0; 5017 TREE_LANG_FLAG_5 (type) = 0; 5018 TREE_LANG_FLAG_6 (type) = 0; 5019 5020 if (TREE_CODE (type) == FUNCTION_TYPE) 5021 { 5022 /* Remove the const and volatile qualifiers from arguments. 
The 5023 C++ front end removes them, but the C front end does not, 5024 leading to false ODR violation errors when merging two 5025 instances of the same function signature compiled by 5026 different front ends. */ 5027 tree p; 5028 5029 for (p = TYPE_ARG_TYPES (type); p; p = TREE_CHAIN (p)) 5030 { 5031 tree arg_type = TREE_VALUE (p); 5032 5033 if (TYPE_READONLY (arg_type) || TYPE_VOLATILE (arg_type)) 5034 { 5035 int quals = TYPE_QUALS (arg_type) 5036 & ~TYPE_QUAL_CONST 5037 & ~TYPE_QUAL_VOLATILE; 5038 TREE_VALUE (p) = build_qualified_type (arg_type, quals); 5039 free_lang_data_in_type (TREE_VALUE (p)); 5040 } 5041 } 5042 } 5043 5044 /* Remove members that are not actually FIELD_DECLs from the field 5045 list of an aggregate. These occur in C++. */ 5046 if (RECORD_OR_UNION_TYPE_P (type)) 5047 { 5048 tree prev, member; 5049 5050 /* Note that TYPE_FIELDS can be shared across distinct 5051 TREE_TYPEs. Therefore, if the first field of TYPE_FIELDS is 5052 to be removed, we cannot set its TREE_CHAIN to NULL. 5053 Otherwise, we would not be able to find all the other fields 5054 in the other instances of this TREE_TYPE. 5055 5056 This was causing an ICE in testsuite/g++.dg/lto/20080915.C. */ 5057 prev = NULL_TREE; 5058 member = TYPE_FIELDS (type); 5059 while (member) 5060 { 5061 if (TREE_CODE (member) == FIELD_DECL 5062 || TREE_CODE (member) == TYPE_DECL) 5063 { 5064 if (prev) 5065 TREE_CHAIN (prev) = member; 5066 else 5067 TYPE_FIELDS (type) = member; 5068 prev = member; 5069 } 5070 5071 member = TREE_CHAIN (member); 5072 } 5073 5074 if (prev) 5075 TREE_CHAIN (prev) = NULL_TREE; 5076 else 5077 TYPE_FIELDS (type) = NULL_TREE; 5078 5079 TYPE_METHODS (type) = NULL_TREE; 5080 if (TYPE_BINFO (type)) 5081 { 5082 free_lang_data_in_binfo (TYPE_BINFO (type)); 5083 /* We need to preserve link to bases and virtual table for all 5084 polymorphic types to make devirtualization machinery working. 
5085 Debug output cares only about bases, but output also 5086 virtual table pointers so merging of -fdevirtualize and 5087 -fno-devirtualize units is easier. */ 5088 if ((!BINFO_VTABLE (TYPE_BINFO (type)) 5089 || !flag_devirtualize) 5090 && ((!BINFO_N_BASE_BINFOS (TYPE_BINFO (type)) 5091 && !BINFO_VTABLE (TYPE_BINFO (type))) 5092 || debug_info_level != DINFO_LEVEL_NONE)) 5093 TYPE_BINFO (type) = NULL; 5094 } 5095 } 5096 else 5097 { 5098 /* For non-aggregate types, clear out the language slot (which 5099 overloads TYPE_BINFO). */ 5100 TYPE_LANG_SLOT_1 (type) = NULL_TREE; 5101 5102 if (INTEGRAL_TYPE_P (type) 5103 || SCALAR_FLOAT_TYPE_P (type) 5104 || FIXED_POINT_TYPE_P (type)) 5105 { 5106 free_lang_data_in_one_sizepos (&TYPE_MIN_VALUE (type)); 5107 free_lang_data_in_one_sizepos (&TYPE_MAX_VALUE (type)); 5108 } 5109 } 5110 5111 free_lang_data_in_one_sizepos (&TYPE_SIZE (type)); 5112 free_lang_data_in_one_sizepos (&TYPE_SIZE_UNIT (type)); 5113 5114 if (TYPE_CONTEXT (type) 5115 && TREE_CODE (TYPE_CONTEXT (type)) == BLOCK) 5116 { 5117 tree ctx = TYPE_CONTEXT (type); 5118 do 5119 { 5120 ctx = BLOCK_SUPERCONTEXT (ctx); 5121 } 5122 while (ctx && TREE_CODE (ctx) == BLOCK); 5123 TYPE_CONTEXT (type) = ctx; 5124 } 5125} 5126 5127 5128/* Return true if DECL may need an assembler name to be set. */ 5129 5130static inline bool 5131need_assembler_name_p (tree decl) 5132{ 5133 /* We use DECL_ASSEMBLER_NAME to hold mangled type names for One Definition Rule 5134 merging. */ 5135 if (flag_lto_odr_type_mering 5136 && TREE_CODE (decl) == TYPE_DECL 5137 && DECL_NAME (decl) 5138 && decl == TYPE_NAME (TREE_TYPE (decl)) 5139 && !is_lang_specific (TREE_TYPE (decl)) 5140 && AGGREGATE_TYPE_P (TREE_TYPE (decl)) 5141 && !TYPE_ARTIFICIAL (TREE_TYPE (decl)) 5142 && !variably_modified_type_p (TREE_TYPE (decl), NULL_TREE) 5143 && !type_in_anonymous_namespace_p (TREE_TYPE (decl))) 5144 return !DECL_ASSEMBLER_NAME_SET_P (decl); 5145 /* Only FUNCTION_DECLs and VAR_DECLs are considered. 
*/ 5146 if (TREE_CODE (decl) != FUNCTION_DECL 5147 && TREE_CODE (decl) != VAR_DECL) 5148 return false; 5149 5150 /* If DECL already has its assembler name set, it does not need a 5151 new one. */ 5152 if (!HAS_DECL_ASSEMBLER_NAME_P (decl) 5153 || DECL_ASSEMBLER_NAME_SET_P (decl)) 5154 return false; 5155 5156 /* Abstract decls do not need an assembler name. */ 5157 if (DECL_ABSTRACT_P (decl)) 5158 return false; 5159 5160 /* For VAR_DECLs, only static, public and external symbols need an 5161 assembler name. */ 5162 if (TREE_CODE (decl) == VAR_DECL 5163 && !TREE_STATIC (decl) 5164 && !TREE_PUBLIC (decl) 5165 && !DECL_EXTERNAL (decl)) 5166 return false; 5167 5168 if (TREE_CODE (decl) == FUNCTION_DECL) 5169 { 5170 /* Do not set assembler name on builtins. Allow RTL expansion to 5171 decide whether to expand inline or via a regular call. */ 5172 if (DECL_BUILT_IN (decl) 5173 && DECL_BUILT_IN_CLASS (decl) != BUILT_IN_FRONTEND) 5174 return false; 5175 5176 /* Functions represented in the callgraph need an assembler name. */ 5177 if (cgraph_node::get (decl) != NULL) 5178 return true; 5179 5180 /* Unused and not public functions don't need an assembler name. */ 5181 if (!TREE_USED (decl) && !TREE_PUBLIC (decl)) 5182 return false; 5183 } 5184 5185 return true; 5186} 5187 5188 5189/* Reset all language specific information still present in symbol 5190 DECL. */ 5191 5192static void 5193free_lang_data_in_decl (tree decl) 5194{ 5195 gcc_assert (DECL_P (decl)); 5196 5197 /* Give the FE a chance to remove its own data first. 
*/ 5198 lang_hooks.free_lang_data (decl); 5199 5200 TREE_LANG_FLAG_0 (decl) = 0; 5201 TREE_LANG_FLAG_1 (decl) = 0; 5202 TREE_LANG_FLAG_2 (decl) = 0; 5203 TREE_LANG_FLAG_3 (decl) = 0; 5204 TREE_LANG_FLAG_4 (decl) = 0; 5205 TREE_LANG_FLAG_5 (decl) = 0; 5206 TREE_LANG_FLAG_6 (decl) = 0; 5207 5208 free_lang_data_in_one_sizepos (&DECL_SIZE (decl)); 5209 free_lang_data_in_one_sizepos (&DECL_SIZE_UNIT (decl)); 5210 if (TREE_CODE (decl) == FIELD_DECL) 5211 { 5212 free_lang_data_in_one_sizepos (&DECL_FIELD_OFFSET (decl)); 5213 if (TREE_CODE (DECL_CONTEXT (decl)) == QUAL_UNION_TYPE) 5214 DECL_QUALIFIER (decl) = NULL_TREE; 5215 } 5216 5217 if (TREE_CODE (decl) == FUNCTION_DECL) 5218 { 5219 struct cgraph_node *node; 5220 if (!(node = cgraph_node::get (decl)) 5221 || (!node->definition && !node->clones)) 5222 { 5223 if (node) 5224 node->release_body (); 5225 else 5226 { 5227 release_function_body (decl); 5228 DECL_ARGUMENTS (decl) = NULL; 5229 DECL_RESULT (decl) = NULL; 5230 DECL_INITIAL (decl) = error_mark_node; 5231 } 5232 } 5233 if (gimple_has_body_p (decl)) 5234 { 5235 tree t; 5236 5237 /* If DECL has a gimple body, then the context for its 5238 arguments must be DECL. Otherwise, it doesn't really 5239 matter, as we will not be emitting any code for DECL. In 5240 general, there may be other instances of DECL created by 5241 the front end and since PARM_DECLs are generally shared, 5242 their DECL_CONTEXT changes as the replicas of DECL are 5243 created. The only time where DECL_CONTEXT is important 5244 is for the FUNCTION_DECLs that have a gimple body (since 5245 the PARM_DECL will be used in the function's body). 
*/ 5246 for (t = DECL_ARGUMENTS (decl); t; t = TREE_CHAIN (t)) 5247 DECL_CONTEXT (t) = decl; 5248 if (!DECL_FUNCTION_SPECIFIC_TARGET (decl)) 5249 DECL_FUNCTION_SPECIFIC_TARGET (decl) 5250 = target_option_default_node; 5251 if (!DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)) 5252 DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl) 5253 = optimization_default_node; 5254 } 5255 5256 /* DECL_SAVED_TREE holds the GENERIC representation for DECL. 5257 At this point, it is not needed anymore. */ 5258 DECL_SAVED_TREE (decl) = NULL_TREE; 5259 5260 /* Clear the abstract origin if it refers to a method. Otherwise 5261 dwarf2out.c will ICE as we clear TYPE_METHODS and thus the 5262 origin will not be output correctly. */ 5263 if (DECL_ABSTRACT_ORIGIN (decl) 5264 && DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl)) 5265 && RECORD_OR_UNION_TYPE_P 5266 (DECL_CONTEXT (DECL_ABSTRACT_ORIGIN (decl)))) 5267 DECL_ABSTRACT_ORIGIN (decl) = NULL_TREE; 5268 5269 /* Sometimes the C++ frontend doesn't manage to transform a temporary 5270 DECL_VINDEX referring to itself into a vtable slot number as it 5271 should. Happens with functions that are copied and then forgotten 5272 about. Just clear it, it won't matter anymore. */ 5273 if (DECL_VINDEX (decl) && !tree_fits_shwi_p (DECL_VINDEX (decl))) 5274 DECL_VINDEX (decl) = NULL_TREE; 5275 } 5276 else if (TREE_CODE (decl) == VAR_DECL) 5277 { 5278 if ((DECL_EXTERNAL (decl) 5279 && (!TREE_STATIC (decl) || !TREE_READONLY (decl))) 5280 || (decl_function_context (decl) && !TREE_STATIC (decl))) 5281 DECL_INITIAL (decl) = NULL_TREE; 5282 } 5283 else if (TREE_CODE (decl) == TYPE_DECL 5284 || TREE_CODE (decl) == FIELD_DECL) 5285 DECL_INITIAL (decl) = NULL_TREE; 5286 else if (TREE_CODE (decl) == TRANSLATION_UNIT_DECL 5287 && DECL_INITIAL (decl) 5288 && TREE_CODE (DECL_INITIAL (decl)) == BLOCK) 5289 { 5290 /* Strip builtins from the translation-unit BLOCK. 
We still have targets 5291 without builtin_decl_explicit support and also builtins are shared 5292 nodes and thus we can't use TREE_CHAIN in multiple lists. */ 5293 tree *nextp = &BLOCK_VARS (DECL_INITIAL (decl)); 5294 while (*nextp) 5295 { 5296 tree var = *nextp; 5297 if (TREE_CODE (var) == FUNCTION_DECL 5298 && DECL_BUILT_IN (var)) 5299 *nextp = TREE_CHAIN (var); 5300 else 5301 nextp = &TREE_CHAIN (var); 5302 } 5303 } 5304} 5305 5306 5307/* Data used when collecting DECLs and TYPEs for language data removal. */ 5308 5309struct free_lang_data_d 5310{ 5311 /* Worklist to avoid excessive recursion. */ 5312 vec<tree> worklist; 5313 5314 /* Set of traversed objects. Used to avoid duplicate visits. */ 5315 hash_set<tree> *pset; 5316 5317 /* Array of symbols to process with free_lang_data_in_decl. */ 5318 vec<tree> decls; 5319 5320 /* Array of types to process with free_lang_data_in_type. */ 5321 vec<tree> types; 5322}; 5323 5324 5325/* Save all language fields needed to generate proper debug information 5326 for DECL. This saves most fields cleared out by free_lang_data_in_decl. */ 5327 5328static void 5329save_debug_info_for_decl (tree t) 5330{ 5331 /*struct saved_debug_info_d *sdi;*/ 5332 5333 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && DECL_P (t)); 5334 5335 /* FIXME. Partial implementation for saving debug info removed. */ 5336} 5337 5338 5339/* Save all language fields needed to generate proper debug information 5340 for TYPE. This saves most fields cleared out by free_lang_data_in_type. */ 5341 5342static void 5343save_debug_info_for_type (tree t) 5344{ 5345 /*struct saved_debug_info_d *sdi;*/ 5346 5347 gcc_assert (debug_info_level > DINFO_LEVEL_TERSE && t && TYPE_P (t)); 5348 5349 /* FIXME. Partial implementation for saving debug info removed. */ 5350} 5351 5352 5353/* Add type or decl T to one of the list of tree nodes that need their 5354 language data removed. The lists are held inside FLD. 
*/ 5355 5356static void 5357add_tree_to_fld_list (tree t, struct free_lang_data_d *fld) 5358{ 5359 if (DECL_P (t)) 5360 { 5361 fld->decls.safe_push (t); 5362 if (debug_info_level > DINFO_LEVEL_TERSE) 5363 save_debug_info_for_decl (t); 5364 } 5365 else if (TYPE_P (t)) 5366 { 5367 fld->types.safe_push (t); 5368 if (debug_info_level > DINFO_LEVEL_TERSE) 5369 save_debug_info_for_type (t); 5370 } 5371 else 5372 gcc_unreachable (); 5373} 5374 5375/* Push tree node T into FLD->WORKLIST. */ 5376 5377static inline void 5378fld_worklist_push (tree t, struct free_lang_data_d *fld) 5379{ 5380 if (t && !is_lang_specific (t) && !fld->pset->contains (t)) 5381 fld->worklist.safe_push ((t)); 5382} 5383 5384 5385/* Operand callback helper for free_lang_data_in_node. *TP is the 5386 subtree operand being considered. */ 5387 5388static tree 5389find_decls_types_r (tree *tp, int *ws, void *data) 5390{ 5391 tree t = *tp; 5392 struct free_lang_data_d *fld = (struct free_lang_data_d *) data; 5393 5394 if (TREE_CODE (t) == TREE_LIST) 5395 return NULL_TREE; 5396 5397 /* Language specific nodes will be removed, so there is no need 5398 to gather anything under them. */ 5399 if (is_lang_specific (t)) 5400 { 5401 *ws = 0; 5402 return NULL_TREE; 5403 } 5404 5405 if (DECL_P (t)) 5406 { 5407 /* Note that walk_tree does not traverse every possible field in 5408 decls, so we have to do our own traversals here. */ 5409 add_tree_to_fld_list (t, fld); 5410 5411 fld_worklist_push (DECL_NAME (t), fld); 5412 fld_worklist_push (DECL_CONTEXT (t), fld); 5413 fld_worklist_push (DECL_SIZE (t), fld); 5414 fld_worklist_push (DECL_SIZE_UNIT (t), fld); 5415 5416 /* We are going to remove everything under DECL_INITIAL for 5417 TYPE_DECLs. No point walking them. 
*/ 5418 if (TREE_CODE (t) != TYPE_DECL) 5419 fld_worklist_push (DECL_INITIAL (t), fld); 5420 5421 fld_worklist_push (DECL_ATTRIBUTES (t), fld); 5422 fld_worklist_push (DECL_ABSTRACT_ORIGIN (t), fld); 5423 5424 if (TREE_CODE (t) == FUNCTION_DECL) 5425 { 5426 fld_worklist_push (DECL_ARGUMENTS (t), fld); 5427 fld_worklist_push (DECL_RESULT (t), fld); 5428 } 5429 else if (TREE_CODE (t) == TYPE_DECL) 5430 { 5431 fld_worklist_push (DECL_ORIGINAL_TYPE (t), fld); 5432 } 5433 else if (TREE_CODE (t) == FIELD_DECL) 5434 { 5435 fld_worklist_push (DECL_FIELD_OFFSET (t), fld); 5436 fld_worklist_push (DECL_BIT_FIELD_TYPE (t), fld); 5437 fld_worklist_push (DECL_FIELD_BIT_OFFSET (t), fld); 5438 fld_worklist_push (DECL_FCONTEXT (t), fld); 5439 } 5440 5441 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == PARM_DECL) 5442 && DECL_HAS_VALUE_EXPR_P (t)) 5443 fld_worklist_push (DECL_VALUE_EXPR (t), fld); 5444 5445 if (TREE_CODE (t) != FIELD_DECL 5446 && TREE_CODE (t) != TYPE_DECL) 5447 fld_worklist_push (TREE_CHAIN (t), fld); 5448 *ws = 0; 5449 } 5450 else if (TYPE_P (t)) 5451 { 5452 /* Note that walk_tree does not traverse every possible field in 5453 types, so we have to do our own traversals here. */ 5454 add_tree_to_fld_list (t, fld); 5455 5456 if (!RECORD_OR_UNION_TYPE_P (t)) 5457 fld_worklist_push (TYPE_CACHED_VALUES (t), fld); 5458 fld_worklist_push (TYPE_SIZE (t), fld); 5459 fld_worklist_push (TYPE_SIZE_UNIT (t), fld); 5460 fld_worklist_push (TYPE_ATTRIBUTES (t), fld); 5461 fld_worklist_push (TYPE_POINTER_TO (t), fld); 5462 fld_worklist_push (TYPE_REFERENCE_TO (t), fld); 5463 fld_worklist_push (TYPE_NAME (t), fld); 5464 /* Do not walk TYPE_NEXT_PTR_TO or TYPE_NEXT_REF_TO. We do not stream 5465 them and thus do not and want not to reach unused pointer types 5466 this way. 
*/ 5467 if (!POINTER_TYPE_P (t)) 5468 fld_worklist_push (TYPE_MINVAL (t), fld); 5469 if (!RECORD_OR_UNION_TYPE_P (t)) 5470 fld_worklist_push (TYPE_MAXVAL (t), fld); 5471 fld_worklist_push (TYPE_MAIN_VARIANT (t), fld); 5472 /* Do not walk TYPE_NEXT_VARIANT. We do not stream it and thus 5473 do not and want not to reach unused variants this way. */ 5474 if (TYPE_CONTEXT (t)) 5475 { 5476 tree ctx = TYPE_CONTEXT (t); 5477 /* We adjust BLOCK TYPE_CONTEXTs to the innermost non-BLOCK one. 5478 So push that instead. */ 5479 while (ctx && TREE_CODE (ctx) == BLOCK) 5480 ctx = BLOCK_SUPERCONTEXT (ctx); 5481 fld_worklist_push (ctx, fld); 5482 } 5483 /* Do not walk TYPE_CANONICAL. We do not stream it and thus do not 5484 and want not to reach unused types this way. */ 5485 5486 if (RECORD_OR_UNION_TYPE_P (t) && TYPE_BINFO (t)) 5487 { 5488 unsigned i; 5489 tree tem; 5490 FOR_EACH_VEC_ELT (*BINFO_BASE_BINFOS (TYPE_BINFO (t)), i, tem) 5491 fld_worklist_push (TREE_TYPE (tem), fld); 5492 tem = BINFO_VIRTUALS (TYPE_BINFO (t)); 5493 if (tem 5494 /* The Java FE overloads BINFO_VIRTUALS for its own purpose. */ 5495 && TREE_CODE (tem) == TREE_LIST) 5496 do 5497 { 5498 fld_worklist_push (TREE_VALUE (tem), fld); 5499 tem = TREE_CHAIN (tem); 5500 } 5501 while (tem); 5502 } 5503 if (RECORD_OR_UNION_TYPE_P (t)) 5504 { 5505 tree tem; 5506 /* Push all TYPE_FIELDS - there can be interleaving interesting 5507 and non-interesting things. 
*/ 5508 tem = TYPE_FIELDS (t); 5509 while (tem) 5510 { 5511 if (TREE_CODE (tem) == FIELD_DECL 5512 || TREE_CODE (tem) == TYPE_DECL) 5513 fld_worklist_push (tem, fld); 5514 tem = TREE_CHAIN (tem); 5515 } 5516 } 5517 5518 fld_worklist_push (TYPE_STUB_DECL (t), fld); 5519 *ws = 0; 5520 } 5521 else if (TREE_CODE (t) == BLOCK) 5522 { 5523 tree tem; 5524 for (tem = BLOCK_VARS (t); tem; tem = TREE_CHAIN (tem)) 5525 fld_worklist_push (tem, fld); 5526 for (tem = BLOCK_SUBBLOCKS (t); tem; tem = BLOCK_CHAIN (tem)) 5527 fld_worklist_push (tem, fld); 5528 fld_worklist_push (BLOCK_ABSTRACT_ORIGIN (t), fld); 5529 } 5530 5531 if (TREE_CODE (t) != IDENTIFIER_NODE 5532 && CODE_CONTAINS_STRUCT (TREE_CODE (t), TS_TYPED)) 5533 fld_worklist_push (TREE_TYPE (t), fld); 5534 5535 return NULL_TREE; 5536} 5537 5538 5539/* Find decls and types in T. */ 5540 5541static void 5542find_decls_types (tree t, struct free_lang_data_d *fld) 5543{ 5544 while (1) 5545 { 5546 if (!fld->pset->contains (t)) 5547 walk_tree (&t, find_decls_types_r, fld, fld->pset); 5548 if (fld->worklist.is_empty ()) 5549 break; 5550 t = fld->worklist.pop (); 5551 } 5552} 5553 5554/* Translate all the types in LIST with the corresponding runtime 5555 types. */ 5556 5557static tree 5558get_eh_types_for_runtime (tree list) 5559{ 5560 tree head, prev; 5561 5562 if (list == NULL_TREE) 5563 return NULL_TREE; 5564 5565 head = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list))); 5566 prev = head; 5567 list = TREE_CHAIN (list); 5568 while (list) 5569 { 5570 tree n = build_tree_list (0, lookup_type_for_runtime (TREE_VALUE (list))); 5571 TREE_CHAIN (prev) = n; 5572 prev = TREE_CHAIN (prev); 5573 list = TREE_CHAIN (list); 5574 } 5575 5576 return head; 5577} 5578 5579 5580/* Find decls and types referenced in EH region R and store them in 5581 FLD->DECLS and FLD->TYPES. 
*/ 5582 5583static void 5584find_decls_types_in_eh_region (eh_region r, struct free_lang_data_d *fld) 5585{ 5586 switch (r->type) 5587 { 5588 case ERT_CLEANUP: 5589 break; 5590 5591 case ERT_TRY: 5592 { 5593 eh_catch c; 5594 5595 /* The types referenced in each catch must first be changed to the 5596 EH types used at runtime. This removes references to FE types 5597 in the region. */ 5598 for (c = r->u.eh_try.first_catch; c ; c = c->next_catch) 5599 { 5600 c->type_list = get_eh_types_for_runtime (c->type_list); 5601 walk_tree (&c->type_list, find_decls_types_r, fld, fld->pset); 5602 } 5603 } 5604 break; 5605 5606 case ERT_ALLOWED_EXCEPTIONS: 5607 r->u.allowed.type_list 5608 = get_eh_types_for_runtime (r->u.allowed.type_list); 5609 walk_tree (&r->u.allowed.type_list, find_decls_types_r, fld, fld->pset); 5610 break; 5611 5612 case ERT_MUST_NOT_THROW: 5613 walk_tree (&r->u.must_not_throw.failure_decl, 5614 find_decls_types_r, fld, fld->pset); 5615 break; 5616 } 5617} 5618 5619 5620/* Find decls and types referenced in cgraph node N and store them in 5621 FLD->DECLS and FLD->TYPES. Unlike pass_referenced_vars, this will 5622 look for *every* kind of DECL and TYPE node reachable from N, 5623 including those embedded inside types and decls (i.e,, TYPE_DECLs, 5624 NAMESPACE_DECLs, etc). */ 5625 5626static void 5627find_decls_types_in_node (struct cgraph_node *n, struct free_lang_data_d *fld) 5628{ 5629 basic_block bb; 5630 struct function *fn; 5631 unsigned ix; 5632 tree t; 5633 5634 find_decls_types (n->decl, fld); 5635 5636 if (!gimple_has_body_p (n->decl)) 5637 return; 5638 5639 gcc_assert (current_function_decl == NULL_TREE && cfun == NULL); 5640 5641 fn = DECL_STRUCT_FUNCTION (n->decl); 5642 5643 /* Traverse locals. */ 5644 FOR_EACH_LOCAL_DECL (fn, ix, t) 5645 find_decls_types (t, fld); 5646 5647 /* Traverse EH regions in FN. 
*/
  {
    eh_region r;
    FOR_ALL_EH_REGION_FN (r, fn)
      find_decls_types_in_eh_region (r, fld);
  }

  /* Traverse every statement in FN.  */
  FOR_EACH_BB_FN (bb, fn)
    {
      gphi_iterator psi;
      gimple_stmt_iterator si;
      unsigned i;

      /* PHI arguments may reference decls and types.  */
      for (psi = gsi_start_phis (bb); !gsi_end_p (psi); gsi_next (&psi))
	{
	  gphi *phi = psi.phi ();

	  for (i = 0; i < gimple_phi_num_args (phi); i++)
	    {
	      tree *arg_p = gimple_phi_arg_def_ptr (phi, i);
	      find_decls_types (*arg_p, fld);
	    }
	}

      for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
	{
	  gimple stmt = gsi_stmt (si);

	  /* The called function's type is not among the statement's
	     operands, so it must be scanned explicitly.  */
	  if (is_gimple_call (stmt))
	    find_decls_types (gimple_call_fntype (stmt), fld);

	  for (i = 0; i < gimple_num_ops (stmt); i++)
	    {
	      tree arg = gimple_op (stmt, i);
	      find_decls_types (arg, fld);
	    }
	}
    }
}


/* Find decls and types referenced in varpool node V and store them in
   FLD->DECLS and FLD->TYPES.  Unlike pass_referenced_vars, this will
   look for *every* kind of DECL and TYPE node reachable from V,
   including those embedded inside types and decls (i.e., TYPE_DECLs,
   NAMESPACE_DECLs, etc).  */

static void
find_decls_types_in_var (varpool_node *v, struct free_lang_data_d *fld)
{
  find_decls_types (v->decl, fld);
}

/* If T needs an assembler name, have one created for it.  */

void
assign_assembler_name_if_neeeded (tree t)
{
  if (need_assembler_name_p (t))
    {
      /* When setting DECL_ASSEMBLER_NAME, the C++ mangler may emit
	 diagnostics that use input_location to show locus
	 information.  The problem here is that, at this point,
	 input_location is generally anchored to the end of the file
	 (since the parser is long gone), so we don't have a good
	 position to pin it to.

	 To alleviate this problem, this uses the location of T's
	 declaration.  Examples of this are
	 testsuite/g++.dg/template/cond2.C and
	 testsuite/g++.dg/template/pr35240.C.  */
      location_t saved_location = input_location;
      input_location = DECL_SOURCE_LOCATION (t);

      decl_assembler_name (t);

      /* Restore the (end-of-file) location so later diagnostics are
	 unaffected.  */
      input_location = saved_location;
    }
}


/* Free language specific information for every operand and expression
   in every node of the call graph.  This process operates in three stages:

   1- Every callgraph node and varpool node is traversed looking for
      decls and types embedded in them.  This is a more exhaustive
      search than that done by find_referenced_vars, because it will
      also collect individual fields, decls embedded in types, etc.

   2- All the decls found are sent to free_lang_data_in_decl.

   3- All the types found are sent to free_lang_data_in_type.

   The ordering between decls and types is important because
   free_lang_data_in_decl sets assembler names, which includes
   mangling.  So types cannot be freed up until assembler names have
   been set up.  */

static void
free_lang_data_in_cgraph (void)
{
  struct cgraph_node *n;
  varpool_node *v;
  struct free_lang_data_d fld;
  tree t;
  unsigned i;
  alias_pair *p;

  /* Initialize sets and arrays to store referenced decls and types.  */
  fld.pset = new hash_set<tree>;
  fld.worklist.create (0);
  fld.decls.create (100);
  fld.types.create (100);

  /* Find decls and types in the body of every function in the callgraph.  */
  FOR_EACH_FUNCTION (n)
    find_decls_types_in_node (n, &fld);

  FOR_EACH_VEC_SAFE_ELT (alias_pairs, i, p)
    find_decls_types (p->decl, &fld);

  /* Find decls and types in every varpool symbol.  */
  FOR_EACH_VARIABLE (v)
    find_decls_types_in_var (v, &fld);

  /* Set the assembler name on every decl found.  We need to do this
     now because free_lang_data_in_decl will invalidate data needed
     for mangling.  This breaks mangling on interdependent decls.  */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    assign_assembler_name_if_neeeded (t);

  /* Traverse every decl found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld.decls, i, t)
    free_lang_data_in_decl (t);

  /* Traverse every type found freeing its language data.  */
  FOR_EACH_VEC_ELT (fld.types, i, t)
    free_lang_data_in_type (t);

  delete fld.pset;
  fld.worklist.release ();
  fld.decls.release ();
  fld.types.release ();
}


/* Free resources that are used by FE but are not needed once they are done.  */

static unsigned
free_lang_data (void)
{
  unsigned i;

  /* If we are the LTO frontend we have freed lang-specific data already.  */
  if (in_lto_p
      || (!flag_generate_lto && !flag_generate_offload))
    return 0;

  /* Allocate and assign alias sets to the standard integer types
     while the slots are still in the way the frontends generated them.  */
  for (i = 0; i < itk_none; ++i)
    if (integer_types[i])
      TYPE_ALIAS_SET (integer_types[i]) = get_alias_set (integer_types[i]);

  /* Traverse the IL resetting language specific information for
     operands, expressions, etc.  */
  free_lang_data_in_cgraph ();

  /* Create gimple variants for common types.  */
  ptrdiff_type_node = integer_type_node;
  fileptr_type_node = ptr_type_node;

  /* Reset some langhooks.  Do not reset types_compatible_p, it may
     still be used indirectly via the get_alias_set langhook.  */
  lang_hooks.dwarf_name = lhd_dwarf_name;
  lang_hooks.decl_printable_name = gimple_decl_printable_name;
  /* We do not want the default decl_assembler_name implementation,
     rather if we have fixed everything we want a wrapper around it
     asserting that all non-local symbols already got their assembler
     name and only produce assembler names for local symbols.  Or rather
     make sure we never call decl_assembler_name on local symbols and
     devise a separate, middle-end private scheme for it.  */

  /* Reset diagnostic machinery.  */
  tree_diagnostics_defaults (global_dc);

  return 0;
}


namespace {

const pass_data pass_data_ipa_free_lang_data =
{
  SIMPLE_IPA_PASS, /* type */
  "*free_lang_data", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_FREE_LANG_DATA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_ipa_free_lang_data : public simple_ipa_opt_pass
{
public:
  pass_ipa_free_lang_data (gcc::context *ctxt)
    : simple_ipa_opt_pass (pass_data_ipa_free_lang_data, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return free_lang_data (); }

}; // class pass_ipa_free_lang_data

} // anon namespace

simple_ipa_opt_pass *
make_pass_ipa_free_lang_data (gcc::context *ctxt)
{
  return new pass_ipa_free_lang_data (ctxt);
}

/* The backbone of is_attribute_p().  ATTR_LEN is the string length of
   ATTR_NAME.  Also used internally by remove_attribute().
*/
bool
private_is_attribute_p (const char *attr_name, size_t attr_len, const_tree ident)
{
  size_t ident_len = IDENTIFIER_LENGTH (ident);

  if (ident_len == attr_len)
    {
      if (strcmp (attr_name, IDENTIFIER_POINTER (ident)) == 0)
	return true;
    }
  else if (ident_len == attr_len + 4)
    {
      /* There is the possibility that ATTR is 'text' and IDENT is
	 '__text__'.  */
      const char *p = IDENTIFIER_POINTER (ident);
      if (p[0] == '_' && p[1] == '_'
	  && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
	  && strncmp (attr_name, p + 2, attr_len) == 0)
	return true;
    }

  return false;
}

/* The backbone of lookup_attribute().  ATTR_LEN is the string length
   of ATTR_NAME, and LIST is not NULL_TREE.  Returns the first
   attribute in LIST whose name matches ATTR_NAME (either exactly or
   in the '__text__' form), or NULL_TREE if none matches.  */
tree
private_lookup_attribute (const char *attr_name, size_t attr_len, tree list)
{
  while (list)
    {
      size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));

      if (ident_len == attr_len)
	{
	  if (!strcmp (attr_name,
		       IDENTIFIER_POINTER (get_attribute_name (list))))
	    break;
	}
      /* TODO: If we made sure that attributes were stored in the
	 canonical form without '__...__' (i.e., as in 'text' as opposed
	 to '__text__') then we could avoid the following case.  */
      else if (ident_len == attr_len + 4)
	{
	  const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
	  if (p[0] == '_' && p[1] == '_'
	      && p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
	      && strncmp (attr_name, p + 2, attr_len) == 0)
	    break;
	}
      list = TREE_CHAIN (list);
    }

  /* NULL_TREE here if the loop ran off the end of the chain.  */
  return list;
}

/* Given an attribute name ATTR_NAME and a list of attributes LIST,
   return a pointer to the attribute's list first element if the attribute
   starts with ATTR_NAME.  ATTR_NAME must be in the form 'text' (not
   '__text__').  */

tree
private_lookup_attribute_by_prefix (const char *attr_name, size_t attr_len,
				    tree list)
{
  while (list)
    {
      size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));

      /* A name shorter than the prefix cannot match.  */
      if (attr_len > ident_len)
	{
	  list = TREE_CHAIN (list);
	  continue;
	}

      const char *p = IDENTIFIER_POINTER (get_attribute_name (list));

      if (strncmp (attr_name, p, attr_len) == 0)
	break;

      /* TODO: If we made sure that attributes were stored in the
	 canonical form without '__...__' (i.e., as in 'text' as opposed
	 to '__text__') then we could avoid the following case.  */
      if (p[0] == '_' && p[1] == '_' &&
	  strncmp (attr_name, p + 2, attr_len) == 0)
	break;

      list = TREE_CHAIN (list);
    }

  return list;
}


/* A variant of lookup_attribute() that can be used with an identifier
   as the first argument, and where the identifier can be either
   'text' or '__text__'.

   Given an attribute ATTR_IDENTIFIER, and a list of attributes LIST,
   return a pointer to the attribute's list element if the attribute
   is part of the list, or NULL_TREE if not found.  If the attribute
   appears more than once, this only returns the first occurrence; the
   TREE_CHAIN of the return value should be passed back in if further
   occurrences are wanted.  ATTR_IDENTIFIER must be an identifier but
   can be in the form 'text' or '__text__'.  */
static tree
lookup_ident_attribute (tree attr_identifier, tree list)
{
  gcc_checking_assert (TREE_CODE (attr_identifier) == IDENTIFIER_NODE);

  while (list)
    {
      gcc_checking_assert (TREE_CODE (get_attribute_name (list))
			   == IDENTIFIER_NODE);

      /* Identifiers can be compared directly for equality.  */
      if (attr_identifier == get_attribute_name (list))
	break;

      /* If they are not equal, they may still be one in the form
	 'text' while the other one is in the form '__text__'.  TODO:
	 If we were storing attributes in normalized 'text' form, then
	 this could all go away and we could take full advantage of
	 the fact that we're comparing identifiers. :-) */
      {
	size_t attr_len = IDENTIFIER_LENGTH (attr_identifier);
	size_t ident_len = IDENTIFIER_LENGTH (get_attribute_name (list));

	if (ident_len == attr_len + 4)
	  {
	    /* LIST's name is the '__text__' form.  */
	    const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
	    const char *q = IDENTIFIER_POINTER (attr_identifier);
	    if (p[0] == '_' && p[1] == '_'
		&& p[ident_len - 2] == '_' && p[ident_len - 1] == '_'
		&& strncmp (q, p + 2, attr_len) == 0)
	      break;
	  }
	else if (ident_len + 4 == attr_len)
	  {
	    /* ATTR_IDENTIFIER is the '__text__' form.  */
	    const char *p = IDENTIFIER_POINTER (get_attribute_name (list));
	    const char *q = IDENTIFIER_POINTER (attr_identifier);
	    if (q[0] == '_' && q[1] == '_'
		&& q[attr_len - 2] == '_' && q[attr_len - 1] == '_'
		&& strncmp (q + 2, p, ident_len) == 0)
	      break;
	  }
      }
      list = TREE_CHAIN (list);
    }

  return list;
}

/* Remove any instances of attribute ATTR_NAME in LIST and return the
   modified list.  ATTR_NAME must be in the canonical 'text' form
   (enforced by the assert below).  */

tree
remove_attribute (const char *attr_name, tree list)
{
  tree *p;
  size_t attr_len = strlen (attr_name);

  gcc_checking_assert (attr_name[0] != '_');

  /* Walk with a pointer-to-pointer so matching nodes can be unlinked
     in place without special-casing the list head.  */
  for (p = &list; *p; )
    {
      tree l = *p;
      /* TODO: If we were storing attributes in normalized form, here
	 we could use a simple strcmp().  */
      if (private_is_attribute_p (attr_name, attr_len, get_attribute_name (l)))
	*p = TREE_CHAIN (l);
      else
	p = &TREE_CHAIN (l);
    }

  return list;
}

/* Return an attribute list that is the union of a1 and a2.
*/

tree
merge_attributes (tree a1, tree a2)
{
  tree attributes;

  /* Either one unset?  Take the set one.  */

  if ((attributes = a1) == 0)
    attributes = a2;

  /* One that completely contains the other?  Take it.  */

  else if (a2 != 0 && ! attribute_list_contained (a1, a2))
    {
      if (attribute_list_contained (a2, a1))
	attributes = a2;
      else
	{
	  /* Pick the longest list, and hang on the other list.  */

	  if (list_length (a1) < list_length (a2))
	    attributes = a2, a2 = a1;

	  for (; a2 != 0; a2 = TREE_CHAIN (a2))
	    {
	      tree a;
	      /* Scan for an existing attribute with the same name and
		 an equal value; only copy A2's node if none exists.  */
	      for (a = lookup_ident_attribute (get_attribute_name (a2),
					       attributes);
		   a != NULL_TREE && !attribute_value_equal (a, a2);
		   a = lookup_ident_attribute (get_attribute_name (a2),
					       TREE_CHAIN (a)))
		;
	      if (a == NULL_TREE)
		{
		  a1 = copy_node (a2);
		  TREE_CHAIN (a1) = attributes;
		  attributes = a1;
		}
	    }
	}
    }
  return attributes;
}

/* Given types T1 and T2, merge their attributes and return
   the result.  */

tree
merge_type_attributes (tree t1, tree t2)
{
  return merge_attributes (TYPE_ATTRIBUTES (t1),
			   TYPE_ATTRIBUTES (t2));
}

/* Given decls OLDDECL and NEWDECL, merge their attributes and return
   the result.  */

tree
merge_decl_attributes (tree olddecl, tree newdecl)
{
  return merge_attributes (DECL_ATTRIBUTES (olddecl),
			   DECL_ATTRIBUTES (newdecl));
}

#if TARGET_DLLIMPORT_DECL_ATTRIBUTES

/* Specialization of merge_decl_attributes for various Windows targets.

   This handles the following situation:

     __declspec (dllimport) int foo;
     int foo;

   The second instance of `foo' nullifies the dllimport.  */

tree
merge_dllimport_decl_attributes (tree old, tree new_tree)
{
  tree a;
  int delete_dllimport_p = 1;

  /* What we need to do here is remove from `old' dllimport if it doesn't
     appear in `new'.  dllimport behaves like extern: if a declaration is
     marked dllimport and a definition appears later, then the object
     is not dllimport'd.  We also remove a `new' dllimport if the old list
     contains dllexport:  dllexport always overrides dllimport, regardless
     of the order of declaration.  */
  if (!VAR_OR_FUNCTION_DECL_P (new_tree))
    delete_dllimport_p = 0;
  else if (DECL_DLLIMPORT_P (new_tree)
	   && lookup_attribute ("dllexport", DECL_ATTRIBUTES (old)))
    {
      DECL_DLLIMPORT_P (new_tree) = 0;
      warning (OPT_Wattributes, "%q+D already declared with dllexport attribute: "
	       "dllimport ignored", new_tree);
    }
  else if (DECL_DLLIMPORT_P (old) && !DECL_DLLIMPORT_P (new_tree))
    {
      /* Warn about overriding a symbol that has already been used, e.g.:
	   extern int __attribute__ ((dllimport)) foo;
	   int* bar () {return &foo;}
	   int foo;
      */
      if (TREE_USED (old))
	{
	  warning (0, "%q+D redeclared without dllimport attribute "
		   "after being referenced with dll linkage", new_tree);
	  /* If we have used a variable's address with dllimport linkage,
	     keep the old DECL_DLLIMPORT_P flag: the ADDR_EXPR using the
	     decl may already have had TREE_CONSTANT computed.
	     We still remove the attribute so that assembler code refers
	     to '&foo' rather than '_imp__foo'.  */
	  if (TREE_CODE (old) == VAR_DECL && TREE_ADDRESSABLE (old))
	    DECL_DLLIMPORT_P (new_tree) = 1;
	}

      /* Let an inline definition silently override the external reference,
	 but otherwise warn about attribute inconsistency.  */
      else if (TREE_CODE (new_tree) == VAR_DECL
	       || !DECL_DECLARED_INLINE_P (new_tree))
	warning (OPT_Wattributes, "%q+D redeclared without dllimport attribute: "
		 "previous dllimport ignored", new_tree);
    }
  else
    delete_dllimport_p = 0;

  a = merge_attributes (DECL_ATTRIBUTES (old), DECL_ATTRIBUTES (new_tree));

  if (delete_dllimport_p)
    a = remove_attribute ("dllimport", a);

  return a;
}

/* Handle a "dllimport" or "dllexport" attribute; arguments as in
   struct attribute_spec.handler.  */

tree
handle_dll_attribute (tree * pnode, tree name, tree args, int flags,
		      bool *no_add_attrs)
{
  tree node = *pnode;
  bool is_dllimport;

  /* These attributes may apply to structure and union types being created,
     but otherwise should pass to the declaration involved.  */
  if (!DECL_P (node))
    {
      if (flags & ((int) ATTR_FLAG_DECL_NEXT | (int) ATTR_FLAG_FUNCTION_NEXT
		   | (int) ATTR_FLAG_ARRAY_NEXT))
	{
	  *no_add_attrs = true;
	  return tree_cons (name, args, NULL_TREE);
	}
      if (TREE_CODE (node) == RECORD_TYPE
	  || TREE_CODE (node) == UNION_TYPE)
	{
	  node = TYPE_NAME (node);
	  if (!node)
	    return NULL_TREE;
	}
      else
	{
	  warning (OPT_Wattributes, "%qE attribute ignored",
		   name);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}
    }

  if (TREE_CODE (node) != FUNCTION_DECL
      && TREE_CODE (node) != VAR_DECL
      && TREE_CODE (node) != TYPE_DECL)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  if (TREE_CODE (node) == TYPE_DECL
      && TREE_CODE (TREE_TYPE (node)) != RECORD_TYPE
      && TREE_CODE (TREE_TYPE (node)) != UNION_TYPE)
    {
      *no_add_attrs = true;
      warning (OPT_Wattributes, "%qE attribute ignored",
	       name);
      return NULL_TREE;
    }

  is_dllimport = is_attribute_p ("dllimport", name);

  /* Report error on dllimport ambiguities seen now before they cause
     any damage.  */
  if (is_dllimport)
    {
      /* Honor any target-specific overrides.  */
      if (!targetm.valid_dllimport_attribute_p (node))
	*no_add_attrs = true;

      /* NOTE(review): the concatenated message below yields a double
	 space ("declared as  dllimport") — confirm whether the extra
	 space is intended before touching the string.  */
      else if (TREE_CODE (node) == FUNCTION_DECL
	       && DECL_DECLARED_INLINE_P (node))
	{
	  warning (OPT_Wattributes, "inline function %q+D declared as "
		   " dllimport: attribute ignored", node);
	  *no_add_attrs = true;
	}
      /* Like MS, treat definition of dllimported variables and
	 non-inlined functions on declaration as syntax errors.  */
      else if (TREE_CODE (node) == FUNCTION_DECL && DECL_INITIAL (node))
	{
	  error ("function %q+D definition is marked dllimport", node);
	  *no_add_attrs = true;
	}

      else if (TREE_CODE (node) == VAR_DECL)
	{
	  if (DECL_INITIAL (node))
	    {
	      error ("variable %q+D definition is marked dllimport",
		     node);
	      *no_add_attrs = true;
	    }

	  /* `extern' needn't be specified with dllimport.
	     Specify `extern' now and hope for the best.  Sigh.  */
	  DECL_EXTERNAL (node) = 1;
	  /* Also, implicitly give dllimport'd variables declared within
	     a function global scope, unless declared static.  */
	  if (current_function_decl != NULL_TREE && !TREE_STATIC (node))
	    TREE_PUBLIC (node) = 1;
	}

      if (*no_add_attrs == false)
	DECL_DLLIMPORT_P (node) = 1;
    }
  else if (TREE_CODE (node) == FUNCTION_DECL
	   && DECL_DECLARED_INLINE_P (node)
	   && flag_keep_inline_dllexport)
    /* An exported function, even if inline, must be emitted.  */
    DECL_EXTERNAL (node) = 0;

  /* Report error if symbol is not accessible at global scope.  */
  if (!TREE_PUBLIC (node)
      && (TREE_CODE (node) == VAR_DECL
	  || TREE_CODE (node) == FUNCTION_DECL))
    {
      error ("external linkage required for symbol %q+D because of "
	     "%qE attribute", node, name);
      *no_add_attrs = true;
    }

  /* A dllexport'd entity must have default visibility so that other
     program units (shared libraries or the main executable) can see
     it.  A dllimport'd entity must have default visibility so that
     the linker knows that undefined references within this program
     unit can be resolved by the dynamic linker.  */
  if (!*no_add_attrs)
    {
      if (DECL_VISIBILITY_SPECIFIED (node)
	  && DECL_VISIBILITY (node) != VISIBILITY_DEFAULT)
	error ("%qE implies default visibility, but %qD has already "
	       "been declared with a different visibility",
	       name, node);
      DECL_VISIBILITY (node) = VISIBILITY_DEFAULT;
      DECL_VISIBILITY_SPECIFIED (node) = 1;
    }

  return NULL_TREE;
}

#endif /* TARGET_DLLIMPORT_DECL_ATTRIBUTES  */

/* Set the type qualifiers for TYPE to TYPE_QUALS, which is a bitmask
   of the various TYPE_QUAL values.  */

static void
set_type_quals (tree type, int type_quals)
{
  TYPE_READONLY (type) = (type_quals & TYPE_QUAL_CONST) != 0;
  TYPE_VOLATILE (type) = (type_quals & TYPE_QUAL_VOLATILE) != 0;
  TYPE_RESTRICT (type) = (type_quals & TYPE_QUAL_RESTRICT) != 0;
  TYPE_ATOMIC (type) = (type_quals & TYPE_QUAL_ATOMIC) != 0;
  TYPE_ADDR_SPACE (type) = DECODE_QUAL_ADDR_SPACE (type_quals);
}

/* Returns true iff unqualified CAND and BASE are equivalent.  */

bool
check_base_type (const_tree cand, const_tree base)
{
  return (TYPE_NAME (cand) == TYPE_NAME (base)
	  /* Apparently this is needed for Objective-C.  */
	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
	  /* Check alignment.  */
	  && TYPE_ALIGN (cand) == TYPE_ALIGN (base)
	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
				   TYPE_ATTRIBUTES (base)));
}

/* Returns true iff CAND is equivalent to BASE with TYPE_QUALS.  */

bool
check_qualified_type (const_tree cand, const_tree base, int type_quals)
{
  return (TYPE_QUALS (cand) == type_quals
	  && check_base_type (cand, base));
}

/* Returns true iff CAND is equivalent to BASE with ALIGN.  */

static bool
check_aligned_type (const_tree cand, const_tree base, unsigned int align)
{
  return (TYPE_QUALS (cand) == TYPE_QUALS (base)
	  && TYPE_NAME (cand) == TYPE_NAME (base)
	  /* Apparently this is needed for Objective-C.  */
	  && TYPE_CONTEXT (cand) == TYPE_CONTEXT (base)
	  /* Check alignment.  */
	  && TYPE_ALIGN (cand) == align
	  && attribute_list_equal (TYPE_ATTRIBUTES (cand),
				   TYPE_ATTRIBUTES (base)));
}

/* This function checks to see if TYPE matches the size one of the built-in
   atomic types, and returns that core atomic type.  Returns NULL_TREE if
   TYPE is incomplete or its size matches no built-in atomic type.  */

static tree
find_atomic_core_type (tree type)
{
  tree base_atomic_type;

  /* Only handle complete types.  */
  if (TYPE_SIZE (type) == NULL_TREE)
    return NULL_TREE;

  HOST_WIDE_INT type_size = tree_to_uhwi (TYPE_SIZE (type));
  switch (type_size)
    {
    case 8:
      base_atomic_type = atomicQI_type_node;
      break;

    case 16:
      base_atomic_type = atomicHI_type_node;
      break;

    case 32:
      base_atomic_type = atomicSI_type_node;
      break;

    case 64:
      base_atomic_type = atomicDI_type_node;
      break;

    case 128:
      base_atomic_type = atomicTI_type_node;
      break;

    default:
      base_atomic_type = NULL_TREE;
    }

  return base_atomic_type;
}

/* Return a version of the TYPE, qualified as indicated by the
   TYPE_QUALS, if one exists.
   If no qualified version exists yet,
   return NULL_TREE.  */

tree
get_qualified_type (tree type, int type_quals)
{
  tree t;

  if (TYPE_QUALS (type) == type_quals)
    return type;

  /* Search the chain of variants to see if there is already one there just
     like the one we need to have.  If so, use that existing one.  We must
     preserve the TYPE_NAME, since there is code that depends on this.  */
  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (check_qualified_type (t, type, type_quals))
      return t;

  return NULL_TREE;
}

/* Like get_qualified_type, but creates the type if it does not
   exist.  This function never returns NULL_TREE.  */

tree
build_qualified_type (tree type, int type_quals)
{
  tree t;

  /* See if we already have the appropriate qualified variant.  */
  t = get_qualified_type (type, type_quals);

  /* If not, build it.  */
  if (!t)
    {
      t = build_variant_type_copy (type);
      set_type_quals (t, type_quals);

      if (((type_quals & TYPE_QUAL_ATOMIC) == TYPE_QUAL_ATOMIC))
	{
	  /* See if this object can map to a basic atomic type.  */
	  tree atomic_type = find_atomic_core_type (type);
	  if (atomic_type)
	    {
	      /* Ensure the alignment of this type is compatible with
		 the required alignment of the atomic type.  */
	      if (TYPE_ALIGN (atomic_type) > TYPE_ALIGN (t))
		TYPE_ALIGN (t) = TYPE_ALIGN (atomic_type);
	    }
	}

      if (TYPE_STRUCTURAL_EQUALITY_P (type))
	/* Propagate structural equality. */
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (type) != type)
	/* Build the underlying canonical type, since it is different
	   from TYPE. */
	{
	  tree c = build_qualified_type (TYPE_CANONICAL (type), type_quals);
	  TYPE_CANONICAL (t) = TYPE_CANONICAL (c);
	}
      else
	/* T is its own canonical type. */
	TYPE_CANONICAL (t) = t;

    }

  return t;
}

/* Create a variant of type T with alignment ALIGN.  */

tree
build_aligned_type (tree type, unsigned int align)
{
  tree t;

  /* A packed type, or one already at the requested alignment, is
     returned unchanged.  */
  if (TYPE_PACKED (type)
      || TYPE_ALIGN (type) == align)
    return type;

  for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    if (check_aligned_type (t, type, align))
      return t;

  t = build_variant_type_copy (type);
  TYPE_ALIGN (t) = align;

  return t;
}

/* Create a new distinct copy of TYPE.  The new type is made its own
   MAIN_VARIANT.  If TYPE requires structural equality checks, the
   resulting type requires structural equality checks; otherwise, its
   TYPE_CANONICAL points to itself.  */

tree
build_distinct_type_copy (tree type)
{
  tree t = copy_node (type);

  TYPE_POINTER_TO (t) = 0;
  TYPE_REFERENCE_TO (t) = 0;

  /* Set the canonical type either to a new equivalence class, or
     propagate the need for structural equality checks.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else
    TYPE_CANONICAL (t) = t;

  /* Make it its own variant.  */
  TYPE_MAIN_VARIANT (t) = t;
  TYPE_NEXT_VARIANT (t) = 0;

  /* Note that it is now possible for TYPE_MIN_VALUE to be a value
     whose TREE_TYPE is not t.  This can also happen in the Ada
     frontend when using subtypes.  */

  return t;
}

/* Create a new variant of TYPE, equivalent but distinct.  This is so
   the caller can modify it.  TYPE_CANONICAL for the return type will
   be equivalent to TYPE_CANONICAL of TYPE, indicating that the types
   are considered equal by the language itself (or that both types
   require structural equality checks).  */

tree
build_variant_type_copy (tree type)
{
  tree t, m = TYPE_MAIN_VARIANT (type);

  t = build_distinct_type_copy (type);

  /* Since we're building a variant, assume that it is a non-semantic
     variant.  This also propagates TYPE_STRUCTURAL_EQUALITY_P.  */
  TYPE_CANONICAL (t) = TYPE_CANONICAL (type);

  /* Add the new type to the chain of variants of TYPE.  */
  TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
  TYPE_NEXT_VARIANT (m) = t;
  TYPE_MAIN_VARIANT (t) = m;

  return t;
}

/* Return true if the from tree in both tree maps are equal.  */

int
tree_map_base_eq (const void *va, const void *vb)
{
  const struct tree_map_base  *const a = (const struct tree_map_base *) va,
    *const b = (const struct tree_map_base *) vb;
  return (a->from == b->from);
}

/* Hash a from tree in a tree_base_map.  */

unsigned int
tree_map_base_hash (const void *item)
{
  return htab_hash_pointer (((const struct tree_map_base *)item)->from);
}

/* Return true if this tree map structure is marked for garbage collection
   purposes.  We simply return true if the from tree is marked, so that this
   structure goes away when the from tree goes away.  */

int
tree_map_base_marked_p (const void *p)
{
  return ggc_marked_p (((const struct tree_map_base *) p)->from);
}

/* Hash a from tree in a tree_map.  */

unsigned int
tree_map_hash (const void *item)
{
  return (((const struct tree_map *) item)->hash);
}

/* Hash a from tree in a tree_decl_map.  */

unsigned int
tree_decl_map_hash (const void *item)
{
  return DECL_UID (((const struct tree_decl_map *) item)->base.from);
}

/* Return the initialization priority for DECL.
*/

priority_type
decl_init_priority_lookup (tree decl)
{
  symtab_node *snode = symtab_node::get (decl);

  /* Decls with no symtab node get the default priority.  */
  if (!snode)
    return DEFAULT_INIT_PRIORITY;
  return
    snode->get_init_priority ();
}

/* Return the finalization priority for DECL.  */

priority_type
decl_fini_priority_lookup (tree decl)
{
  cgraph_node *node = cgraph_node::get (decl);

  if (!node)
    return DEFAULT_INIT_PRIORITY;
  return
    node->get_fini_priority ();
}

/* Set the initialization priority for DECL to PRIORITY.  */

void
decl_init_priority_insert (tree decl, priority_type priority)
{
  struct symtab_node *snode;

  /* For the default priority, do not create a symtab node just to
     record it.  */
  if (priority == DEFAULT_INIT_PRIORITY)
    {
      snode = symtab_node::get (decl);
      if (!snode)
	return;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    snode = varpool_node::get_create (decl);
  else
    snode = cgraph_node::get_create (decl);
  snode->set_init_priority (priority);
}

/* Set the finalization priority for DECL to PRIORITY.  */

void
decl_fini_priority_insert (tree decl, priority_type priority)
{
  struct cgraph_node *node;

  /* For the default priority, do not create a cgraph node just to
     record it.  */
  if (priority == DEFAULT_INIT_PRIORITY)
    {
      node = cgraph_node::get (decl);
      if (!node)
	return;
    }
  else
    node = cgraph_node::get_create (decl);
  node->set_fini_priority (priority);
}

/* Print out the statistics for the DECL_DEBUG_EXPR hash table.  */

static void
print_debug_expr_statistics (void)
{
  fprintf (stderr, "DECL_DEBUG_EXPR  hash: size %ld, %ld elements, %f collisions\n",
	   (long) debug_expr_for_decl->size (),
	   (long) debug_expr_for_decl->elements (),
	   debug_expr_for_decl->collisions ());
}

/* Print out the statistics for the DECL_VALUE_EXPR hash table.  */

static void
print_value_expr_statistics (void)
{
  fprintf (stderr, "DECL_VALUE_EXPR  hash: size %ld, %ld elements, %f collisions\n",
	   (long) value_expr_for_decl->size (),
	   (long) value_expr_for_decl->elements (),
	   value_expr_for_decl->collisions ());
}

/* Lookup a debug expression for FROM, and return it if we find one.  */

tree
decl_debug_expr_lookup (tree from)
{
  struct tree_decl_map *h, in;
  in.base.from = from;

  h = debug_expr_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return h->to;
  return NULL_TREE;
}

/* Insert a mapping FROM->TO in the debug expression hashtable.  */

void
decl_debug_expr_insert (tree from, tree to)
{
  struct tree_decl_map *h;

  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  h->to = to;
  *debug_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}

/* Lookup a value expression for FROM, and return it if we find one.  */

tree
decl_value_expr_lookup (tree from)
{
  struct tree_decl_map *h, in;
  in.base.from = from;

  h = value_expr_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return h->to;
  return NULL_TREE;
}

/* Insert a mapping FROM->TO in the value expression hashtable.  */

void
decl_value_expr_insert (tree from, tree to)
{
  struct tree_decl_map *h;

  h = ggc_alloc<tree_decl_map> ();
  h->base.from = from;
  h->to = to;
  *value_expr_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT) = h;
}

/* Lookup a vector of debug arguments for FROM, and return it if we
   find one.  */

vec<tree, va_gc> **
decl_debug_args_lookup (tree from)
{
  struct tree_vec_map *h, in;

  /* DECL_HAS_DEBUG_ARGS_P gates the table lookup; the assert below
     checks the flag and table stay in sync.  */
  if (!DECL_HAS_DEBUG_ARGS_P (from))
    return NULL;
  gcc_checking_assert (debug_args_for_decl != NULL);
  in.base.from = from;
  h = debug_args_for_decl->find_with_hash (&in, DECL_UID (from));
  if (h)
    return &h->to;
  return NULL;
}

/* Insert a mapping FROM->empty vector of debug arguments in the value
   expression hashtable.  */

vec<tree, va_gc> **
decl_debug_args_insert (tree from)
{
  struct tree_vec_map *h;
  tree_vec_map **loc;

  /* Reuse an existing entry rather than inserting a duplicate.  */
  if (DECL_HAS_DEBUG_ARGS_P (from))
    return decl_debug_args_lookup (from);
  if (debug_args_for_decl == NULL)
    debug_args_for_decl = hash_table<tree_vec_map_cache_hasher>::create_ggc (64);
  h = ggc_alloc<tree_vec_map> ();
  h->base.from = from;
  h->to = NULL;
  loc = debug_args_for_decl->find_slot_with_hash (h, DECL_UID (from), INSERT);
  *loc = h;
  DECL_HAS_DEBUG_ARGS_P (from) = 1;
  return &h->to;
}

/* Hashing of types so that we don't make duplicates.
   The entry point is `type_hash_canon'.  */

/* Compute a hash code for a list of types (chain of TREE_LIST nodes
   with types in the TREE_VALUE slots), by adding the hash codes
   of the individual types.  */

static void
type_hash_list (const_tree list, inchash::hash &hstate)
{
  const_tree tail;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_VALUE (tail) != error_mark_node)
      hstate.add_object (TYPE_HASH (TREE_VALUE (tail)));
}

/* These are the Hashtable callback functions.  */

/* Returns true iff the types are equivalent.  */

bool
type_cache_hasher::equal (type_hash *a, type_hash *b)
{
  /* First test the things that are the same for all types.
*/
  if (a->hash != b->hash
      || TREE_CODE (a->type) != TREE_CODE (b->type)
      || TREE_TYPE (a->type) != TREE_TYPE (b->type)
      || !attribute_list_equal (TYPE_ATTRIBUTES (a->type),
				TYPE_ATTRIBUTES (b->type))
      || (TREE_CODE (a->type) != COMPLEX_TYPE
          && TYPE_NAME (a->type) != TYPE_NAME (b->type)))
    return 0;

  /* Be careful about comparing arrays before and after the element type
     has been completed; don't compare TYPE_ALIGN unless both types are
     complete.  */
  if (COMPLETE_TYPE_P (a->type) && COMPLETE_TYPE_P (b->type)
      && (TYPE_ALIGN (a->type) != TYPE_ALIGN (b->type)
	  || TYPE_MODE (a->type) != TYPE_MODE (b->type)))
    return 0;

  /* Then test per-tree-code details.  */
  switch (TREE_CODE (a->type))
    {
    case VOID_TYPE:
    case COMPLEX_TYPE:
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case NULLPTR_TYPE:
      return 1;

    case VECTOR_TYPE:
      return TYPE_VECTOR_SUBPARTS (a->type) == TYPE_VECTOR_SUBPARTS (b->type);

    case ENUMERAL_TYPE:
      if (TYPE_VALUES (a->type) != TYPE_VALUES (b->type)
	  && !(TYPE_VALUES (a->type)
	       && TREE_CODE (TYPE_VALUES (a->type)) == TREE_LIST
	       && TYPE_VALUES (b->type)
	       && TREE_CODE (TYPE_VALUES (b->type)) == TREE_LIST
	       && type_list_equal (TYPE_VALUES (a->type),
				   TYPE_VALUES (b->type))))
	return 0;

      /* ... fall through ... */

    case INTEGER_TYPE:
    case REAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_PRECISION (a->type) != TYPE_PRECISION (b->type))
	return false;
      return ((TYPE_MAX_VALUE (a->type) == TYPE_MAX_VALUE (b->type)
	       || tree_int_cst_equal (TYPE_MAX_VALUE (a->type),
				      TYPE_MAX_VALUE (b->type)))
	      && (TYPE_MIN_VALUE (a->type) == TYPE_MIN_VALUE (b->type)
		  || tree_int_cst_equal (TYPE_MIN_VALUE (a->type),
					 TYPE_MIN_VALUE (b->type))));

    case FIXED_POINT_TYPE:
      return TYPE_SATURATING (a->type) == TYPE_SATURATING (b->type);

    case OFFSET_TYPE:
      return TYPE_OFFSET_BASETYPE (a->type) == TYPE_OFFSET_BASETYPE (b->type);

    case METHOD_TYPE:
      if (TYPE_METHOD_BASETYPE (a->type) == TYPE_METHOD_BASETYPE (b->type)
	  && (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	      || (TYPE_ARG_TYPES (a->type)
		  && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
		  && TYPE_ARG_TYPES (b->type)
		  && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_ARG_TYPES (a->type),
				      TYPE_ARG_TYPES (b->type)))))
	break;
      return 0;
    case ARRAY_TYPE:
      return TYPE_DOMAIN (a->type) == TYPE_DOMAIN (b->type);

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      return (TYPE_FIELDS (a->type) == TYPE_FIELDS (b->type)
	      || (TYPE_FIELDS (a->type)
		  && TREE_CODE (TYPE_FIELDS (a->type)) == TREE_LIST
		  && TYPE_FIELDS (b->type)
		  && TREE_CODE (TYPE_FIELDS (b->type)) == TREE_LIST
		  && type_list_equal (TYPE_FIELDS (a->type),
				      TYPE_FIELDS (b->type))));

    case FUNCTION_TYPE:
      if (TYPE_ARG_TYPES (a->type) == TYPE_ARG_TYPES (b->type)
	  || (TYPE_ARG_TYPES (a->type)
	      && TREE_CODE (TYPE_ARG_TYPES (a->type)) == TREE_LIST
	      && TYPE_ARG_TYPES (b->type)
	      && TREE_CODE (TYPE_ARG_TYPES (b->type)) == TREE_LIST
	      && type_list_equal (TYPE_ARG_TYPES (a->type),
				  TYPE_ARG_TYPES (b->type))))
	break;
      return 0;

    default:
      return 0;
    }

  /* Give the frontend a final say for codes it knows more about.  */
  if (lang_hooks.types.type_hash_eq != NULL)
    return lang_hooks.types.type_hash_eq (a->type, b->type);

  return 1;
}

/* Given TYPE, and HASHCODE its hash code, return the canonical
   object for an identical type if one already exists.
   Otherwise, return TYPE, and record it as the canonical object.

   To use this function, first create a type of the sort you want.
   Then compute its hash code from the fields of the type that
   make it different from other similar types.
   Then call this function and use the value.  */

tree
type_hash_canon (unsigned int hashcode, tree type)
{
  type_hash in;
  type_hash **loc;

  /* The hash table only contains main variants, so ensure that's what we're
     being passed.  */
  gcc_assert (TYPE_MAIN_VARIANT (type) == type);

  /* The TYPE_ALIGN field of a type is set by layout_type(), so we
     must call that routine before comparing TYPE_ALIGNs.
*/ 6944 layout_type (type); 6945 6946 in.hash = hashcode; 6947 in.type = type; 6948 6949 loc = type_hash_table->find_slot_with_hash (&in, hashcode, INSERT); 6950 if (*loc) 6951 { 6952 tree t1 = ((type_hash *) *loc)->type; 6953 gcc_assert (TYPE_MAIN_VARIANT (t1) == t1); 6954 if (GATHER_STATISTICS) 6955 { 6956 tree_code_counts[(int) TREE_CODE (type)]--; 6957 tree_node_counts[(int) t_kind]--; 6958 tree_node_sizes[(int) t_kind] -= sizeof (struct tree_type_non_common); 6959 } 6960 return t1; 6961 } 6962 else 6963 { 6964 struct type_hash *h; 6965 6966 h = ggc_alloc<type_hash> (); 6967 h->hash = hashcode; 6968 h->type = type; 6969 *loc = h; 6970 6971 return type; 6972 } 6973} 6974 6975static void 6976print_type_hash_statistics (void) 6977{ 6978 fprintf (stderr, "Type hash: size %ld, %ld elements, %f collisions\n", 6979 (long) type_hash_table->size (), 6980 (long) type_hash_table->elements (), 6981 type_hash_table->collisions ()); 6982} 6983 6984/* Compute a hash code for a list of attributes (chain of TREE_LIST nodes 6985 with names in the TREE_PURPOSE slots and args in the TREE_VALUE slots), 6986 by adding the hash codes of the individual attributes. */ 6987 6988static void 6989attribute_hash_list (const_tree list, inchash::hash &hstate) 6990{ 6991 const_tree tail; 6992 6993 for (tail = list; tail; tail = TREE_CHAIN (tail)) 6994 /* ??? Do we want to add in TREE_VALUE too? */ 6995 hstate.add_object (IDENTIFIER_HASH_VALUE (get_attribute_name (tail))); 6996} 6997 6998/* Given two lists of attributes, return true if list l2 is 6999 equivalent to l1. */ 7000 7001int 7002attribute_list_equal (const_tree l1, const_tree l2) 7003{ 7004 if (l1 == l2) 7005 return 1; 7006 7007 return attribute_list_contained (l1, l2) 7008 && attribute_list_contained (l2, l1); 7009} 7010 7011/* Given two lists of attributes, return true if list L2 is 7012 completely contained within L1. */ 7013/* ??? This would be faster if attribute names were stored in a canonicalized 7014 form. 
   Otherwise, if L1 uses `foo' and L2 uses `__foo__', the long method
   must be used to show these elements are equivalent (which they are).  */
/* ??? It's not clear that attributes with arguments will always be handled
   correctly.  */

int
attribute_list_contained (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  /* First check the obvious, maybe the lists are identical.  */
  if (l1 == l2)
    return 1;

  /* Maybe the lists are similar.  Walk the common pointer-equal prefix.  */
  for (t1 = l1, t2 = l2;
       t1 != 0 && t2 != 0
       && get_attribute_name (t1) == get_attribute_name (t2)
       && TREE_VALUE (t1) == TREE_VALUE (t2);
       t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    ;

  /* Maybe the lists are equal.  */
  if (t1 == 0 && t2 == 0)
    return 1;

  /* Otherwise every remaining attribute of L2 must appear somewhere in L1
     with an equal value.  */
  for (; t2 != 0; t2 = TREE_CHAIN (t2))
    {
      const_tree attr;
      /* This CONST_CAST is okay because lookup_attribute does not
	 modify its argument and the return value is assigned to a
	 const_tree.  */
      for (attr = lookup_ident_attribute (get_attribute_name (t2),
					  CONST_CAST_TREE (l1));
	   attr != NULL_TREE && !attribute_value_equal (t2, attr);
	   attr = lookup_ident_attribute (get_attribute_name (t2),
					  TREE_CHAIN (attr)))
	;

      if (attr == NULL_TREE)
	return 0;
    }

  return 1;
}

/* Given two lists of types
   (chains of TREE_LIST nodes with types in the TREE_VALUE slots)
   return 1 if the lists contain the same types in the same order.
   Also, the TREE_PURPOSEs must match.  */

int
type_list_equal (const_tree l1, const_tree l2)
{
  const_tree t1, t2;

  for (t1 = l1, t2 = l2; t1 && t2; t1 = TREE_CHAIN (t1), t2 = TREE_CHAIN (t2))
    if (TREE_VALUE (t1) != TREE_VALUE (t2)
	|| (TREE_PURPOSE (t1) != TREE_PURPOSE (t2)
	    && !(1 == simple_cst_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2))
		 && (TREE_TYPE (TREE_PURPOSE (t1))
		     == TREE_TYPE (TREE_PURPOSE (t2))))))
      return 0;

  /* Equal only if both lists ended together (both NULL here).  */
  return t1 == t2;
}

/* Returns the number of arguments to the FUNCTION_TYPE or METHOD_TYPE
   given by TYPE.  If the argument list accepts variable arguments,
   then this function counts only the ordinary arguments.  */

int
type_num_arguments (const_tree type)
{
  int i = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (type); t; t = TREE_CHAIN (t))
    /* If the function does not take a variable number of arguments,
       the last element in the list will have type `void'.  */
    if (VOID_TYPE_P (TREE_VALUE (t)))
      break;
    else
      ++i;

  return i;
}

/* Nonzero if integer constants T1 and T2
   represent the same constant value.  Either argument may be NULL;
   two NULLs do not compare equal unless they are the same pointer.  */

int
tree_int_cst_equal (const_tree t1, const_tree t2)
{
  if (t1 == t2)
    return 1;

  if (t1 == 0 || t2 == 0)
    return 0;

  if (TREE_CODE (t1) == INTEGER_CST
      && TREE_CODE (t2) == INTEGER_CST
      && wi::to_widest (t1) == wi::to_widest (t2))
    return 1;

  return 0;
}

/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.  */

bool
tree_fits_shwi_p (const_tree t)
{
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_shwi_p (wi::to_widest (t)));
}

/* Return true if T is an INTEGER_CST whose numerical value (extended
   according to TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT.  */

bool
tree_fits_uhwi_p (const_tree t)
{
  return (t != NULL_TREE
	  && TREE_CODE (t) == INTEGER_CST
	  && wi::fits_uhwi_p (wi::to_widest (t)));
}

/* T is an INTEGER_CST whose numerical value (extended according to
   TYPE_UNSIGNED) fits in a signed HOST_WIDE_INT.
Return that 7146 HOST_WIDE_INT. */ 7147 7148HOST_WIDE_INT 7149tree_to_shwi (const_tree t) 7150{ 7151 gcc_assert (tree_fits_shwi_p (t)); 7152 return TREE_INT_CST_LOW (t); 7153} 7154 7155/* T is an INTEGER_CST whose numerical value (extended according to 7156 TYPE_UNSIGNED) fits in an unsigned HOST_WIDE_INT. Return that 7157 HOST_WIDE_INT. */ 7158 7159unsigned HOST_WIDE_INT 7160tree_to_uhwi (const_tree t) 7161{ 7162 gcc_assert (tree_fits_uhwi_p (t)); 7163 return TREE_INT_CST_LOW (t); 7164} 7165 7166/* Return the most significant (sign) bit of T. */ 7167 7168int 7169tree_int_cst_sign_bit (const_tree t) 7170{ 7171 unsigned bitno = TYPE_PRECISION (TREE_TYPE (t)) - 1; 7172 7173 return wi::extract_uhwi (t, bitno, 1); 7174} 7175 7176/* Return an indication of the sign of the integer constant T. 7177 The return value is -1 if T < 0, 0 if T == 0, and 1 if T > 0. 7178 Note that -1 will never be returned if T's type is unsigned. */ 7179 7180int 7181tree_int_cst_sgn (const_tree t) 7182{ 7183 if (wi::eq_p (t, 0)) 7184 return 0; 7185 else if (TYPE_UNSIGNED (TREE_TYPE (t))) 7186 return 1; 7187 else if (wi::neg_p (t)) 7188 return -1; 7189 else 7190 return 1; 7191} 7192 7193/* Return the minimum number of bits needed to represent VALUE in a 7194 signed or unsigned type, UNSIGNEDP says which. */ 7195 7196unsigned int 7197tree_int_cst_min_precision (tree value, signop sgn) 7198{ 7199 /* If the value is negative, compute its negative minus 1. The latter 7200 adjustment is because the absolute value of the largest negative value 7201 is one larger than the largest positive value. This is equivalent to 7202 a bit-wise negation, so use that operation instead. */ 7203 7204 if (tree_int_cst_sgn (value) < 0) 7205 value = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (value), value); 7206 7207 /* Return the number of bits needed, taking into account the fact 7208 that we need one more bit for a signed than unsigned type. 
7209 If value is 0 or -1, the minimum precision is 1 no matter 7210 whether unsignedp is true or false. */ 7211 7212 if (integer_zerop (value)) 7213 return 1; 7214 else 7215 return tree_floor_log2 (value) + 1 + (sgn == SIGNED ? 1 : 0) ; 7216} 7217 7218/* Return truthvalue of whether T1 is the same tree structure as T2. 7219 Return 1 if they are the same. 7220 Return 0 if they are understandably different. 7221 Return -1 if either contains tree structure not understood by 7222 this function. */ 7223 7224int 7225simple_cst_equal (const_tree t1, const_tree t2) 7226{ 7227 enum tree_code code1, code2; 7228 int cmp; 7229 int i; 7230 7231 if (t1 == t2) 7232 return 1; 7233 if (t1 == 0 || t2 == 0) 7234 return 0; 7235 7236 code1 = TREE_CODE (t1); 7237 code2 = TREE_CODE (t2); 7238 7239 if (CONVERT_EXPR_CODE_P (code1) || code1 == NON_LVALUE_EXPR) 7240 { 7241 if (CONVERT_EXPR_CODE_P (code2) 7242 || code2 == NON_LVALUE_EXPR) 7243 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7244 else 7245 return simple_cst_equal (TREE_OPERAND (t1, 0), t2); 7246 } 7247 7248 else if (CONVERT_EXPR_CODE_P (code2) 7249 || code2 == NON_LVALUE_EXPR) 7250 return simple_cst_equal (t1, TREE_OPERAND (t2, 0)); 7251 7252 if (code1 != code2) 7253 return 0; 7254 7255 switch (code1) 7256 { 7257 case INTEGER_CST: 7258 return wi::to_widest (t1) == wi::to_widest (t2); 7259 7260 case REAL_CST: 7261 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2)); 7262 7263 case FIXED_CST: 7264 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (t1), TREE_FIXED_CST (t2)); 7265 7266 case STRING_CST: 7267 return (TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2) 7268 && ! 
memcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2), 7269 TREE_STRING_LENGTH (t1))); 7270 7271 case CONSTRUCTOR: 7272 { 7273 unsigned HOST_WIDE_INT idx; 7274 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (t1); 7275 vec<constructor_elt, va_gc> *v2 = CONSTRUCTOR_ELTS (t2); 7276 7277 if (vec_safe_length (v1) != vec_safe_length (v2)) 7278 return false; 7279 7280 for (idx = 0; idx < vec_safe_length (v1); ++idx) 7281 /* ??? Should we handle also fields here? */ 7282 if (!simple_cst_equal ((*v1)[idx].value, (*v2)[idx].value)) 7283 return false; 7284 return true; 7285 } 7286 7287 case SAVE_EXPR: 7288 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7289 7290 case CALL_EXPR: 7291 cmp = simple_cst_equal (CALL_EXPR_FN (t1), CALL_EXPR_FN (t2)); 7292 if (cmp <= 0) 7293 return cmp; 7294 if (call_expr_nargs (t1) != call_expr_nargs (t2)) 7295 return 0; 7296 { 7297 const_tree arg1, arg2; 7298 const_call_expr_arg_iterator iter1, iter2; 7299 for (arg1 = first_const_call_expr_arg (t1, &iter1), 7300 arg2 = first_const_call_expr_arg (t2, &iter2); 7301 arg1 && arg2; 7302 arg1 = next_const_call_expr_arg (&iter1), 7303 arg2 = next_const_call_expr_arg (&iter2)) 7304 { 7305 cmp = simple_cst_equal (arg1, arg2); 7306 if (cmp <= 0) 7307 return cmp; 7308 } 7309 return arg1 == arg2; 7310 } 7311 7312 case TARGET_EXPR: 7313 /* Special case: if either target is an unallocated VAR_DECL, 7314 it means that it's going to be unified with whatever the 7315 TARGET_EXPR is really supposed to initialize, so treat it 7316 as being equivalent to anything. 
*/ 7317 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL 7318 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE 7319 && !DECL_RTL_SET_P (TREE_OPERAND (t1, 0))) 7320 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL 7321 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE 7322 && !DECL_RTL_SET_P (TREE_OPERAND (t2, 0)))) 7323 cmp = 1; 7324 else 7325 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7326 7327 if (cmp <= 0) 7328 return cmp; 7329 7330 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1)); 7331 7332 case WITH_CLEANUP_EXPR: 7333 cmp = simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7334 if (cmp <= 0) 7335 return cmp; 7336 7337 return simple_cst_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t1, 1)); 7338 7339 case COMPONENT_REF: 7340 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1)) 7341 return simple_cst_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0)); 7342 7343 return 0; 7344 7345 case VAR_DECL: 7346 case PARM_DECL: 7347 case CONST_DECL: 7348 case FUNCTION_DECL: 7349 return 0; 7350 7351 default: 7352 break; 7353 } 7354 7355 /* This general rule works for most tree codes. All exceptions should be 7356 handled above. If this is a language-specific tree code, we can't 7357 trust what might be in the operand, so say we don't know 7358 the situation. */ 7359 if ((int) code1 >= (int) LAST_AND_UNUSED_TREE_CODE) 7360 return -1; 7361 7362 switch (TREE_CODE_CLASS (code1)) 7363 { 7364 case tcc_unary: 7365 case tcc_binary: 7366 case tcc_comparison: 7367 case tcc_expression: 7368 case tcc_reference: 7369 case tcc_statement: 7370 cmp = 1; 7371 for (i = 0; i < TREE_CODE_LENGTH (code1); i++) 7372 { 7373 cmp = simple_cst_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i)); 7374 if (cmp <= 0) 7375 return cmp; 7376 } 7377 7378 return cmp; 7379 7380 default: 7381 return -1; 7382 } 7383} 7384 7385/* Compare the value of T, an INTEGER_CST, with U, an unsigned integer value. 
   Return -1, 0, or 1 if the value of T is less than, equal to, or greater
   than U, respectively.  */

int
compare_tree_int (const_tree t, unsigned HOST_WIDE_INT u)
{
  /* A negative T is smaller than any unsigned U.  */
  if (tree_int_cst_sgn (t) < 0)
    return -1;
  /* A value too wide for an unsigned HOST_WIDE_INT exceeds any U.  */
  else if (!tree_fits_uhwi_p (t))
    return 1;
  else if (TREE_INT_CST_LOW (t) == u)
    return 0;
  else if (TREE_INT_CST_LOW (t) < u)
    return -1;
  else
    return 1;
}

/* Return true if SIZE represents a constant size that is in bounds of
   what the middle-end and the backend accepts (covering not more than
   half of the address-space).  */

bool
valid_constant_size_p (const_tree size)
{
  /* Reject sizes that overflowed, don't fit an unsigned HOST_WIDE_INT,
     or have their sign bit set (i.e. exceed half the address space).  */
  if (! tree_fits_uhwi_p (size)
      || TREE_OVERFLOW (size)
      || tree_int_cst_sign_bit (size) != 0)
    return false;
  return true;
}

/* Return the precision of the type, or for a complex or vector type the
   precision of the type of its elements.  */

unsigned int
element_precision (const_tree type)
{
  enum tree_code code = TREE_CODE (type);
  if (code == COMPLEX_TYPE || code == VECTOR_TYPE)
    type = TREE_TYPE (type);

  return TYPE_PRECISION (type);
}

/* Return true if CODE represents an associative tree code.  Otherwise
   return false.  */
bool
associative_tree_code (enum tree_code code)
{
  switch (code)
    {
    case BIT_IOR_EXPR:
    case BIT_AND_EXPR:
    case BIT_XOR_EXPR:
    case PLUS_EXPR:
    case MULT_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
      return true;

    default:
      break;
    }
  return false;
}

/* Return true if CODE represents a commutative tree code.  Otherwise
   return false.
*/
bool
commutative_tree_code (enum tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MULT_EXPR:
    case MULT_HIGHPART_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NE_EXPR:
    case EQ_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case WIDEN_MULT_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      return true;

    default:
      break;
    }
  return false;
}

/* Return true if CODE represents a ternary tree code for which the
   first two operands are commutative.  Otherwise return false.  */
bool
commutative_ternary_tree_code (enum tree_code code)
{
  switch (code)
    {
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
    case DOT_PROD_EXPR:
    case FMA_EXPR:
      return true;

    default:
      break;
    }
  return false;
}

namespace inchash
{

/* Generate a hash value for an expression.  This can be used iteratively
   by passing a previous result as the HSTATE argument.

   This function is intended to produce the same hash for expressions which
   would compare equal using operand_equal_p.  */
void
add_expr (const_tree t, inchash::hash &hstate)
{
  int i;
  enum tree_code code;
  enum tree_code_class tclass;

  /* A null tree hashes as a zero merge so callers need not check.  */
  if (t == NULL_TREE)
    {
      hstate.merge_hash (0);
      return;
    }

  code = TREE_CODE (t);

  switch (code)
    {
    /* Alas, constants aren't shared, so we can't rely on pointer
       identity.  */
    case VOID_CST:
      hstate.merge_hash (0);
      return;
    case INTEGER_CST:
      /* Hash every element of the wide-int representation.  */
      for (i = 0; i < TREE_INT_CST_NUNITS (t); i++)
	hstate.add_wide_int (TREE_INT_CST_ELT (t, i));
      return;
    case REAL_CST:
      {
	unsigned int val2 = real_hash (TREE_REAL_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case FIXED_CST:
      {
	unsigned int val2 = fixed_hash (TREE_FIXED_CST_PTR (t));
	hstate.merge_hash (val2);
	return;
      }
    case STRING_CST:
      hstate.add ((const void *) TREE_STRING_POINTER (t), TREE_STRING_LENGTH (t));
      return;
    case COMPLEX_CST:
      inchash::add_expr (TREE_REALPART (t), hstate);
      inchash::add_expr (TREE_IMAGPART (t), hstate);
      return;
    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (t); ++i)
	  inchash::add_expr (VECTOR_CST_ELT (t, i), hstate);
	return;
      }
    case SSA_NAME:
      /* We can just compare by pointer.  */
      hstate.add_wide_int (SSA_NAME_VERSION (t));
      return;
    case PLACEHOLDER_EXPR:
      /* The node itself doesn't matter.  */
      return;
    case TREE_LIST:
      /* A list of expressions, for a CALL_EXPR or as the elements of a
	 VECTOR_CST.  */
      for (; t; t = TREE_CHAIN (t))
	inchash::add_expr (TREE_VALUE (t), hstate);
      return;
    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	tree field, value;
	FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t), idx, field, value)
	  {
	    inchash::add_expr (field, hstate);
	    inchash::add_expr (value, hstate);
	  }
	return;
      }
    case FUNCTION_DECL:
      /* When referring to a built-in FUNCTION_DECL, use the __builtin__ form.
	 Otherwise nodes that compare equal according to operand_equal_p might
	 get different hash codes.  However, don't do this for machine specific
	 or front end builtins, since the function code is overloaded in those
	 cases.  */
      if (DECL_BUILT_IN_CLASS (t) == BUILT_IN_NORMAL
	  && builtin_decl_explicit_p (DECL_FUNCTION_CODE (t)))
	{
	  t = builtin_decl_explicit (DECL_FUNCTION_CODE (t));
	  code = TREE_CODE (t);
	}
      /* FALL THROUGH */
    default:
      tclass = TREE_CODE_CLASS (code);

      if (tclass == tcc_declaration)
	{
	  /* DECL's have a unique ID */
	  hstate.add_wide_int (DECL_UID (t));
	}
      else
	{
	  gcc_assert (IS_EXPR_CODE_CLASS (tclass));

	  hstate.add_object (code);

	  /* Don't hash the type, that can lead to having nodes which
	     compare equal according to operand_equal_p, but which
	     have different hash codes.  */
	  if (CONVERT_EXPR_CODE_P (code)
	      || code == NON_LVALUE_EXPR)
	    {
	      /* Make sure to include signness in the hash computation.  */
	      hstate.add_int (TYPE_UNSIGNED (TREE_TYPE (t)));
	      inchash::add_expr (TREE_OPERAND (t, 0), hstate);
	    }

	  else if (commutative_tree_code (code))
	    {
	      /* It's a commutative expression.  We want to hash it the same
		 however it appears.  We do this by first hashing both operands
		 and then rehashing based on the order of their independent
		 hashes.  */
	      inchash::hash one, two;
	      inchash::add_expr (TREE_OPERAND (t, 0), one);
	      inchash::add_expr (TREE_OPERAND (t, 1), two);
	      hstate.add_commutative (one, two);
	    }
	  else
	    for (i = TREE_OPERAND_LENGTH (t) - 1; i >= 0; --i)
	      inchash::add_expr (TREE_OPERAND (t, i), hstate);
	}
      return;
    }
}

}

/* Constructors for pointer, array and function types.
   (RECORD_TYPE, UNION_TYPE and ENUMERAL_TYPE nodes are
   constructed by language-dependent code, not here.)  */

/* Construct, lay out and return the type of pointers to TO_TYPE with
   mode MODE.  If CAN_ALIAS_ALL is TRUE, indicate this type can
   reference all of memory.  If such a type has already been
   constructed, reuse it.
*/

tree
build_pointer_type_for_mode (tree to_type, machine_mode mode,
			     bool can_alias_all)
{
  tree t;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a POINTER_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_POINTER_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_POINTER_TO (to_type) != 0
      && TREE_CODE (TYPE_POINTER_TO (to_type)) != POINTER_TYPE)
    return TYPE_POINTER_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  The chain is linked through
     TYPE_NEXT_PTR_TO.  */
  for (t = TYPE_POINTER_TO (to_type); t; t = TYPE_NEXT_PTR_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (POINTER_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Thread the new type onto the front of TO_TYPE's pointer-type chain.  */
  TYPE_NEXT_PTR_TO (t) = TYPE_POINTER_TO (to_type);
  TYPE_POINTER_TO (to_type) = t;

  /* The canonical pointer type points to the canonical pointee.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type)
    TYPE_CANONICAL (t)
      = build_pointer_type_for_mode (TYPE_CANONICAL (to_type),
				     mode, can_alias_all);

  /* Lay out the type.  This function has many callers that are concerned
     with expression-construction, and this simplifies them all.  */
  layout_type (t);

  return t;
}

/* By default build pointers in ptr_mode.  */

tree
build_pointer_type (tree to_type)
{
  addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
					      : TYPE_ADDR_SPACE (to_type);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  return build_pointer_type_for_mode (to_type, pointer_mode, false);
}

/* Same as build_pointer_type_for_mode, but for REFERENCE_TYPE.  */

tree
build_reference_type_for_mode (tree to_type, machine_mode mode,
			       bool can_alias_all)
{
  tree t;

  if (to_type == error_mark_node)
    return error_mark_node;

  /* If the pointed-to type has the may_alias attribute set, force
     a TYPE_REF_CAN_ALIAS_ALL pointer to be generated.  */
  if (lookup_attribute ("may_alias", TYPE_ATTRIBUTES (to_type)))
    can_alias_all = true;

  /* In some cases, languages will have things that aren't a REFERENCE_TYPE
     (such as a RECORD_TYPE for fat pointers in Ada) as TYPE_REFERENCE_TO.
     In that case, return that type without regard to the rest of our
     operands.

     ??? This is a kludge, but consistent with the way this function has
     always operated and there doesn't seem to be a good way to avoid this
     at the moment.  */
  if (TYPE_REFERENCE_TO (to_type) != 0
      && TREE_CODE (TYPE_REFERENCE_TO (to_type)) != REFERENCE_TYPE)
    return TYPE_REFERENCE_TO (to_type);

  /* First, if we already have a type for pointers to TO_TYPE and it's
     the proper mode, use it.  */
  for (t = TYPE_REFERENCE_TO (to_type); t; t = TYPE_NEXT_REF_TO (t))
    if (TYPE_MODE (t) == mode && TYPE_REF_CAN_ALIAS_ALL (t) == can_alias_all)
      return t;

  t = make_node (REFERENCE_TYPE);

  TREE_TYPE (t) = to_type;
  SET_TYPE_MODE (t, mode);
  TYPE_REF_CAN_ALIAS_ALL (t) = can_alias_all;
  /* Thread the new type onto the front of TO_TYPE's reference chain.  */
  TYPE_NEXT_REF_TO (t) = TYPE_REFERENCE_TO (to_type);
  TYPE_REFERENCE_TO (to_type) = t;

  if (TYPE_STRUCTURAL_EQUALITY_P (to_type))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (to_type) != to_type)
    TYPE_CANONICAL (t)
      = build_reference_type_for_mode (TYPE_CANONICAL (to_type),
				       mode, can_alias_all);

  layout_type (t);

  return t;
}


/* Build the node for the type of references-to-TO_TYPE by default
   in ptr_mode.  */

tree
build_reference_type (tree to_type)
{
  addr_space_t as = to_type == error_mark_node? ADDR_SPACE_GENERIC
					      : TYPE_ADDR_SPACE (to_type);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  return build_reference_type_for_mode (to_type, pointer_mode, false);
}

/* Cache of nonstandard integer types: the lower half holds signed
   entries, the upper half (offset MAX_INT_CACHED_PREC + 1) unsigned
   ones, indexed by precision.  */
#define MAX_INT_CACHED_PREC \
  (HOST_BITS_PER_WIDE_INT > 64 ? HOST_BITS_PER_WIDE_INT : 64)
static GTY(()) tree nonstandard_integer_type_cache[2 * MAX_INT_CACHED_PREC + 2];

/* Builds a signed or unsigned integer type of precision PRECISION.
   Used for C bitfields whose precision does not match that of
   built-in target types.
*/
tree
build_nonstandard_integer_type (unsigned HOST_WIDE_INT precision,
				int unsignedp)
{
  tree itype, ret;

  /* Unsigned entries live in the upper half of the cache array.  */
  if (unsignedp)
    unsignedp = MAX_INT_CACHED_PREC + 1;

  if (precision <= MAX_INT_CACHED_PREC)
    {
      itype = nonstandard_integer_type_cache[precision + unsignedp];
      if (itype)
	return itype;
    }

  itype = make_node (INTEGER_TYPE);
  TYPE_PRECISION (itype) = precision;

  /* Set min/max values, size and mode for the chosen signedness.  */
  if (unsignedp)
    fixup_unsigned_type (itype);
  else
    fixup_signed_type (itype);

  ret = itype;
  /* Share identical types through the type hash table, keyed on the
     maximum value when it fits a HOST_WIDE_INT.  */
  if (tree_fits_uhwi_p (TYPE_MAX_VALUE (itype)))
    ret = type_hash_canon (tree_to_uhwi (TYPE_MAX_VALUE (itype)), itype);
  if (precision <= MAX_INT_CACHED_PREC)
    nonstandard_integer_type_cache[precision + unsignedp] = ret;

  return ret;
}

/* Create a range of some discrete type TYPE (an INTEGER_TYPE, ENUMERAL_TYPE
   or BOOLEAN_TYPE) with low bound LOWVAL and high bound HIGHVAL.  If SHARED
   is true, reuse such a type that has already been constructed.  */

static tree
build_range_type_1 (tree type, tree lowval, tree highval, bool shared)
{
  tree itype = make_node (INTEGER_TYPE);
  inchash::hash hstate;

  TREE_TYPE (itype) = type;

  TYPE_MIN_VALUE (itype) = fold_convert (type, lowval);
  /* HIGHVAL may be NULL for an open-ended range.  */
  TYPE_MAX_VALUE (itype) = highval ? fold_convert (type, highval) : NULL;

  /* Inherit layout properties from the base type.  */
  TYPE_PRECISION (itype) = TYPE_PRECISION (type);
  SET_TYPE_MODE (itype, TYPE_MODE (type));
  TYPE_SIZE (itype) = TYPE_SIZE (type);
  TYPE_SIZE_UNIT (itype) = TYPE_SIZE_UNIT (type);
  TYPE_ALIGN (itype) = TYPE_ALIGN (type);
  TYPE_USER_ALIGN (itype) = TYPE_USER_ALIGN (type);

  if (!shared)
    return itype;

  if ((TYPE_MIN_VALUE (itype)
       && TREE_CODE (TYPE_MIN_VALUE (itype)) != INTEGER_CST)
      || (TYPE_MAX_VALUE (itype)
	  && TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST))
    {
      /* Since we cannot reliably merge this type, we need to compare it using
	 structural equality checks.  */
      SET_TYPE_STRUCTURAL_EQUALITY (itype);
      return itype;
    }

  /* Hash the bounds and the base type to share identical ranges.  */
  inchash::add_expr (TYPE_MIN_VALUE (itype), hstate);
  inchash::add_expr (TYPE_MAX_VALUE (itype), hstate);
  hstate.merge_hash (TYPE_HASH (type));
  itype = type_hash_canon (hstate.end (), itype);

  return itype;
}

/* Wrapper around build_range_type_1 with SHARED set to true.  */

tree
build_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, true);
}

/* Wrapper around build_range_type_1 with SHARED set to false.  */

tree
build_nonshared_range_type (tree type, tree lowval, tree highval)
{
  return build_range_type_1 (type, lowval, highval, false);
}

/* Create a type of integers to be the TYPE_DOMAIN of an ARRAY_TYPE.
   MAXVAL should be the maximum value in the domain
   (one less than the length of the array).

   The maximum value that MAXVAL can have is INT_MAX for a HOST_WIDE_INT.
   We don't enforce this limit, that is up to caller (e.g. language front end).
   The limit exists because the result is a signed type and we don't handle
   sizes that use more than one HOST_WIDE_INT.
*/

tree
build_index_type (tree maxval)
{
  return build_range_type (sizetype, size_zero_node, maxval);
}

/* Return true if the debug information for TYPE, a subtype, should be emitted
   as a subrange type.  If so, set LOWVAL to the low bound and HIGHVAL to the
   high bound, respectively.  Sometimes doing so unnecessarily obfuscates the
   debug info and doesn't reflect the source code.  */

bool
subrange_type_for_debug_p (const_tree type, tree *lowval, tree *highval)
{
  tree base_type = TREE_TYPE (type), low, high;

  /* Subrange types have a base type which is an integral type.  */
  if (!INTEGRAL_TYPE_P (base_type))
    return false;

  /* Get the real bounds of the subtype.  Prefer the frontend's notion
     of the bounds when it provides one.  */
  if (lang_hooks.types.get_subrange_bounds)
    lang_hooks.types.get_subrange_bounds (type, &low, &high);
  else
    {
      low = TYPE_MIN_VALUE (type);
      high = TYPE_MAX_VALUE (type);
    }

  /* If the type and its base type have the same representation and the same
     name, then the type is not a subrange but a copy of the base type.  */
  if ((TREE_CODE (base_type) == INTEGER_TYPE
       || TREE_CODE (base_type) == BOOLEAN_TYPE)
      && int_size_in_bytes (type) == int_size_in_bytes (base_type)
      && tree_int_cst_equal (low, TYPE_MIN_VALUE (base_type))
      && tree_int_cst_equal (high, TYPE_MAX_VALUE (base_type))
      && TYPE_IDENTIFIER (type) == TYPE_IDENTIFIER (base_type))
    return false;

  if (lowval)
    *lowval = low;
  if (highval)
    *highval = high;
  return true;
}

/* Construct, lay out and return the type of arrays of elements with ELT_TYPE
   and number of elements specified by the range of values of INDEX_TYPE.
   If SHARED is true, reuse such a type that has already been constructed.  */

static tree
build_array_type_1 (tree elt_type, tree index_type, bool shared)
{
  tree t;

  if (TREE_CODE (elt_type) == FUNCTION_TYPE)
    {
      error ("arrays of functions are not meaningful");
      elt_type = integer_type_node;
    }

  t = make_node (ARRAY_TYPE);
  TREE_TYPE (t) = elt_type;
  TYPE_DOMAIN (t) = index_type;
  TYPE_ADDR_SPACE (t) = TYPE_ADDR_SPACE (elt_type);
  layout_type (t);

  /* If the element type is incomplete at this point we get marked for
     structural equality.  Do not record these types in the canonical
     type hashtable.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    return t;

  if (shared)
    {
      /* Share identical array types through the type hash table.  */
      inchash::hash hstate;
      hstate.add_object (TYPE_HASH (elt_type));
      if (index_type)
	hstate.add_object (TYPE_HASH (index_type));
      t = type_hash_canon (hstate.end (), t);
    }

  if (TYPE_CANONICAL (t) == t)
    {
      if (TYPE_STRUCTURAL_EQUALITY_P (elt_type)
	  || (index_type && TYPE_STRUCTURAL_EQUALITY_P (index_type)))
	SET_TYPE_STRUCTURAL_EQUALITY (t);
      else if (TYPE_CANONICAL (elt_type) != elt_type
	       || (index_type && TYPE_CANONICAL (index_type) != index_type))
	/* Build the canonical array type from the canonical element and
	   index types.  */
	TYPE_CANONICAL (t)
	  = build_array_type_1 (TYPE_CANONICAL (elt_type),
				index_type
				? TYPE_CANONICAL (index_type) : NULL_TREE,
				shared);
    }

  return t;
}

/* Wrapper around build_array_type_1 with SHARED set to true.  */

tree
build_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, true);
}

/* Wrapper around build_array_type_1 with SHARED set to false.  */

tree
build_nonshared_array_type (tree elt_type, tree index_type)
{
  return build_array_type_1 (elt_type, index_type, false);
}

/* Return a representation of ELT_TYPE[NELTS], using indices of type
   sizetype.
*/ 8019 8020tree 8021build_array_type_nelts (tree elt_type, unsigned HOST_WIDE_INT nelts) 8022{ 8023 return build_array_type (elt_type, build_index_type (size_int (nelts - 1))); 8024} 8025 8026/* Recursively examines the array elements of TYPE, until a non-array 8027 element type is found. */ 8028 8029tree 8030strip_array_types (tree type) 8031{ 8032 while (TREE_CODE (type) == ARRAY_TYPE) 8033 type = TREE_TYPE (type); 8034 8035 return type; 8036} 8037 8038/* Computes the canonical argument types from the argument type list 8039 ARGTYPES. 8040 8041 Upon return, *ANY_STRUCTURAL_P will be true iff either it was true 8042 on entry to this function, or if any of the ARGTYPES are 8043 structural. 8044 8045 Upon return, *ANY_NONCANONICAL_P will be true iff either it was 8046 true on entry to this function, or if any of the ARGTYPES are 8047 non-canonical. 8048 8049 Returns a canonical argument list, which may be ARGTYPES when the 8050 canonical argument list is unneeded (i.e., *ANY_STRUCTURAL_P is 8051 true) or would not differ from ARGTYPES. */ 8052 8053static tree 8054maybe_canonicalize_argtypes (tree argtypes, 8055 bool *any_structural_p, 8056 bool *any_noncanonical_p) 8057{ 8058 tree arg; 8059 bool any_noncanonical_argtypes_p = false; 8060 8061 for (arg = argtypes; arg && !(*any_structural_p); arg = TREE_CHAIN (arg)) 8062 { 8063 if (!TREE_VALUE (arg) || TREE_VALUE (arg) == error_mark_node) 8064 /* Fail gracefully by stating that the type is structural. */ 8065 *any_structural_p = true; 8066 else if (TYPE_STRUCTURAL_EQUALITY_P (TREE_VALUE (arg))) 8067 *any_structural_p = true; 8068 else if (TYPE_CANONICAL (TREE_VALUE (arg)) != TREE_VALUE (arg) 8069 || TREE_PURPOSE (arg)) 8070 /* If the argument has a default argument, we consider it 8071 non-canonical even though the type itself is canonical. 8072 That way, different variants of function and method types 8073 with default arguments will all point to the variant with 8074 no defaults as their canonical type. 
*/ 8075 any_noncanonical_argtypes_p = true; 8076 } 8077 8078 if (*any_structural_p) 8079 return argtypes; 8080 8081 if (any_noncanonical_argtypes_p) 8082 { 8083 /* Build the canonical list of argument types. */ 8084 tree canon_argtypes = NULL_TREE; 8085 bool is_void = false; 8086 8087 for (arg = argtypes; arg; arg = TREE_CHAIN (arg)) 8088 { 8089 if (arg == void_list_node) 8090 is_void = true; 8091 else 8092 canon_argtypes = tree_cons (NULL_TREE, 8093 TYPE_CANONICAL (TREE_VALUE (arg)), 8094 canon_argtypes); 8095 } 8096 8097 canon_argtypes = nreverse (canon_argtypes); 8098 if (is_void) 8099 canon_argtypes = chainon (canon_argtypes, void_list_node); 8100 8101 /* There is a non-canonical type. */ 8102 *any_noncanonical_p = true; 8103 return canon_argtypes; 8104 } 8105 8106 /* The canonical argument types are the same as ARGTYPES. */ 8107 return argtypes; 8108} 8109 8110/* Construct, lay out and return 8111 the type of functions returning type VALUE_TYPE 8112 given arguments of types ARG_TYPES. 8113 ARG_TYPES is a chain of TREE_LIST nodes whose TREE_VALUEs 8114 are data type nodes for the arguments of the function. 8115 If such a type has already been constructed, reuse it. */ 8116 8117tree 8118build_function_type (tree value_type, tree arg_types) 8119{ 8120 tree t; 8121 inchash::hash hstate; 8122 bool any_structural_p, any_noncanonical_p; 8123 tree canon_argtypes; 8124 8125 if (TREE_CODE (value_type) == FUNCTION_TYPE) 8126 { 8127 error ("function return type cannot be function"); 8128 value_type = integer_type_node; 8129 } 8130 8131 /* Make a node of the sort we want. */ 8132 t = make_node (FUNCTION_TYPE); 8133 TREE_TYPE (t) = value_type; 8134 TYPE_ARG_TYPES (t) = arg_types; 8135 8136 /* If we already have such a type, use the old one. */ 8137 hstate.add_object (TYPE_HASH (value_type)); 8138 type_hash_list (arg_types, hstate); 8139 t = type_hash_canon (hstate.end (), t); 8140 8141 /* Set up the canonical type. 
*/ 8142 any_structural_p = TYPE_STRUCTURAL_EQUALITY_P (value_type); 8143 any_noncanonical_p = TYPE_CANONICAL (value_type) != value_type; 8144 canon_argtypes = maybe_canonicalize_argtypes (arg_types, 8145 &any_structural_p, 8146 &any_noncanonical_p); 8147 if (any_structural_p) 8148 SET_TYPE_STRUCTURAL_EQUALITY (t); 8149 else if (any_noncanonical_p) 8150 TYPE_CANONICAL (t) = build_function_type (TYPE_CANONICAL (value_type), 8151 canon_argtypes); 8152 8153 if (!COMPLETE_TYPE_P (t)) 8154 layout_type (t); 8155 return t; 8156} 8157 8158/* Build a function type. The RETURN_TYPE is the type returned by the 8159 function. If VAARGS is set, no void_type_node is appended to the 8160 the list. ARGP must be always be terminated be a NULL_TREE. */ 8161 8162static tree 8163build_function_type_list_1 (bool vaargs, tree return_type, va_list argp) 8164{ 8165 tree t, args, last; 8166 8167 t = va_arg (argp, tree); 8168 for (args = NULL_TREE; t != NULL_TREE; t = va_arg (argp, tree)) 8169 args = tree_cons (NULL_TREE, t, args); 8170 8171 if (vaargs) 8172 { 8173 last = args; 8174 if (args != NULL_TREE) 8175 args = nreverse (args); 8176 gcc_assert (last != void_list_node); 8177 } 8178 else if (args == NULL_TREE) 8179 args = void_list_node; 8180 else 8181 { 8182 last = args; 8183 args = nreverse (args); 8184 TREE_CHAIN (last) = void_list_node; 8185 } 8186 args = build_function_type (return_type, args); 8187 8188 return args; 8189} 8190 8191/* Build a function type. The RETURN_TYPE is the type returned by the 8192 function. If additional arguments are provided, they are 8193 additional argument types. The list of argument types must always 8194 be terminated by NULL_TREE. */ 8195 8196tree 8197build_function_type_list (tree return_type, ...) 8198{ 8199 tree args; 8200 va_list p; 8201 8202 va_start (p, return_type); 8203 args = build_function_type_list_1 (false, return_type, p); 8204 va_end (p); 8205 return args; 8206} 8207 8208/* Build a variable argument function type. 
The RETURN_TYPE is the 8209 type returned by the function. If additional arguments are provided, 8210 they are additional argument types. The list of argument types must 8211 always be terminated by NULL_TREE. */ 8212 8213tree 8214build_varargs_function_type_list (tree return_type, ...) 8215{ 8216 tree args; 8217 va_list p; 8218 8219 va_start (p, return_type); 8220 args = build_function_type_list_1 (true, return_type, p); 8221 va_end (p); 8222 8223 return args; 8224} 8225 8226/* Build a function type. RETURN_TYPE is the type returned by the 8227 function; VAARGS indicates whether the function takes varargs. The 8228 function takes N named arguments, the types of which are provided in 8229 ARG_TYPES. */ 8230 8231static tree 8232build_function_type_array_1 (bool vaargs, tree return_type, int n, 8233 tree *arg_types) 8234{ 8235 int i; 8236 tree t = vaargs ? NULL_TREE : void_list_node; 8237 8238 for (i = n - 1; i >= 0; i--) 8239 t = tree_cons (NULL_TREE, arg_types[i], t); 8240 8241 return build_function_type (return_type, t); 8242} 8243 8244/* Build a function type. RETURN_TYPE is the type returned by the 8245 function. The function takes N named arguments, the types of which 8246 are provided in ARG_TYPES. */ 8247 8248tree 8249build_function_type_array (tree return_type, int n, tree *arg_types) 8250{ 8251 return build_function_type_array_1 (false, return_type, n, arg_types); 8252} 8253 8254/* Build a variable argument function type. RETURN_TYPE is the type 8255 returned by the function. The function takes N named arguments, the 8256 types of which are provided in ARG_TYPES. */ 8257 8258tree 8259build_varargs_function_type_array (tree return_type, int n, tree *arg_types) 8260{ 8261 return build_function_type_array_1 (true, return_type, n, arg_types); 8262} 8263 8264/* Build a METHOD_TYPE for a member of BASETYPE. The RETTYPE (a TYPE) 8265 and ARGTYPES (a TREE_LIST) are the return type and arguments types 8266 for the method. 
An implicit additional parameter (of type 8267 pointer-to-BASETYPE) is added to the ARGTYPES. */ 8268 8269tree 8270build_method_type_directly (tree basetype, 8271 tree rettype, 8272 tree argtypes) 8273{ 8274 tree t; 8275 tree ptype; 8276 inchash::hash hstate; 8277 bool any_structural_p, any_noncanonical_p; 8278 tree canon_argtypes; 8279 8280 /* Make a node of the sort we want. */ 8281 t = make_node (METHOD_TYPE); 8282 8283 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); 8284 TREE_TYPE (t) = rettype; 8285 ptype = build_pointer_type (basetype); 8286 8287 /* The actual arglist for this function includes a "hidden" argument 8288 which is "this". Put it into the list of argument types. */ 8289 argtypes = tree_cons (NULL_TREE, ptype, argtypes); 8290 TYPE_ARG_TYPES (t) = argtypes; 8291 8292 /* If we already have such a type, use the old one. */ 8293 hstate.add_object (TYPE_HASH (basetype)); 8294 hstate.add_object (TYPE_HASH (rettype)); 8295 type_hash_list (argtypes, hstate); 8296 t = type_hash_canon (hstate.end (), t); 8297 8298 /* Set up the canonical type. */ 8299 any_structural_p 8300 = (TYPE_STRUCTURAL_EQUALITY_P (basetype) 8301 || TYPE_STRUCTURAL_EQUALITY_P (rettype)); 8302 any_noncanonical_p 8303 = (TYPE_CANONICAL (basetype) != basetype 8304 || TYPE_CANONICAL (rettype) != rettype); 8305 canon_argtypes = maybe_canonicalize_argtypes (TREE_CHAIN (argtypes), 8306 &any_structural_p, 8307 &any_noncanonical_p); 8308 if (any_structural_p) 8309 SET_TYPE_STRUCTURAL_EQUALITY (t); 8310 else if (any_noncanonical_p) 8311 TYPE_CANONICAL (t) 8312 = build_method_type_directly (TYPE_CANONICAL (basetype), 8313 TYPE_CANONICAL (rettype), 8314 canon_argtypes); 8315 if (!COMPLETE_TYPE_P (t)) 8316 layout_type (t); 8317 8318 return t; 8319} 8320 8321/* Construct, lay out and return the type of methods belonging to class 8322 BASETYPE and whose arguments and values are described by TYPE. 8323 If that type exists already, reuse it. 8324 TYPE must be a FUNCTION_TYPE node. 
*/ 8325 8326tree 8327build_method_type (tree basetype, tree type) 8328{ 8329 gcc_assert (TREE_CODE (type) == FUNCTION_TYPE); 8330 8331 return build_method_type_directly (basetype, 8332 TREE_TYPE (type), 8333 TYPE_ARG_TYPES (type)); 8334} 8335 8336/* Construct, lay out and return the type of offsets to a value 8337 of type TYPE, within an object of type BASETYPE. 8338 If a suitable offset type exists already, reuse it. */ 8339 8340tree 8341build_offset_type (tree basetype, tree type) 8342{ 8343 tree t; 8344 inchash::hash hstate; 8345 8346 /* Make a node of the sort we want. */ 8347 t = make_node (OFFSET_TYPE); 8348 8349 TYPE_OFFSET_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype); 8350 TREE_TYPE (t) = type; 8351 8352 /* If we already have such a type, use the old one. */ 8353 hstate.add_object (TYPE_HASH (basetype)); 8354 hstate.add_object (TYPE_HASH (type)); 8355 t = type_hash_canon (hstate.end (), t); 8356 8357 if (!COMPLETE_TYPE_P (t)) 8358 layout_type (t); 8359 8360 if (TYPE_CANONICAL (t) == t) 8361 { 8362 if (TYPE_STRUCTURAL_EQUALITY_P (basetype) 8363 || TYPE_STRUCTURAL_EQUALITY_P (type)) 8364 SET_TYPE_STRUCTURAL_EQUALITY (t); 8365 else if (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)) != basetype 8366 || TYPE_CANONICAL (type) != type) 8367 TYPE_CANONICAL (t) 8368 = build_offset_type (TYPE_CANONICAL (TYPE_MAIN_VARIANT (basetype)), 8369 TYPE_CANONICAL (type)); 8370 } 8371 8372 return t; 8373} 8374 8375/* Create a complex type whose components are COMPONENT_TYPE. */ 8376 8377tree 8378build_complex_type (tree component_type) 8379{ 8380 tree t; 8381 inchash::hash hstate; 8382 8383 gcc_assert (INTEGRAL_TYPE_P (component_type) 8384 || SCALAR_FLOAT_TYPE_P (component_type) 8385 || FIXED_POINT_TYPE_P (component_type)); 8386 8387 /* Make a node of the sort we want. */ 8388 t = make_node (COMPLEX_TYPE); 8389 8390 TREE_TYPE (t) = TYPE_MAIN_VARIANT (component_type); 8391 8392 /* If we already have such a type, use the old one. 
*/ 8393 hstate.add_object (TYPE_HASH (component_type)); 8394 t = type_hash_canon (hstate.end (), t); 8395 8396 if (!COMPLETE_TYPE_P (t)) 8397 layout_type (t); 8398 8399 if (TYPE_CANONICAL (t) == t) 8400 { 8401 if (TYPE_STRUCTURAL_EQUALITY_P (component_type)) 8402 SET_TYPE_STRUCTURAL_EQUALITY (t); 8403 else if (TYPE_CANONICAL (component_type) != component_type) 8404 TYPE_CANONICAL (t) 8405 = build_complex_type (TYPE_CANONICAL (component_type)); 8406 } 8407 8408 /* We need to create a name, since complex is a fundamental type. */ 8409 if (! TYPE_NAME (t)) 8410 { 8411 const char *name; 8412 if (component_type == char_type_node) 8413 name = "complex char"; 8414 else if (component_type == signed_char_type_node) 8415 name = "complex signed char"; 8416 else if (component_type == unsigned_char_type_node) 8417 name = "complex unsigned char"; 8418 else if (component_type == short_integer_type_node) 8419 name = "complex short int"; 8420 else if (component_type == short_unsigned_type_node) 8421 name = "complex short unsigned int"; 8422 else if (component_type == integer_type_node) 8423 name = "complex int"; 8424 else if (component_type == unsigned_type_node) 8425 name = "complex unsigned int"; 8426 else if (component_type == long_integer_type_node) 8427 name = "complex long int"; 8428 else if (component_type == long_unsigned_type_node) 8429 name = "complex long unsigned int"; 8430 else if (component_type == long_long_integer_type_node) 8431 name = "complex long long int"; 8432 else if (component_type == long_long_unsigned_type_node) 8433 name = "complex long long unsigned int"; 8434 else 8435 name = 0; 8436 8437 if (name != 0) 8438 TYPE_NAME (t) = build_decl (UNKNOWN_LOCATION, TYPE_DECL, 8439 get_identifier (name), t); 8440 } 8441 8442 return build_qualified_type (t, TYPE_QUALS (component_type)); 8443} 8444 8445/* If TYPE is a real or complex floating-point type and the target 8446 does not directly support arithmetic on TYPE then return the wider 8447 type to be used for 
arithmetic on TYPE. Otherwise, return 8448 NULL_TREE. */ 8449 8450tree 8451excess_precision_type (tree type) 8452{ 8453 if (flag_excess_precision != EXCESS_PRECISION_FAST) 8454 { 8455 int flt_eval_method = TARGET_FLT_EVAL_METHOD; 8456 switch (TREE_CODE (type)) 8457 { 8458 case REAL_TYPE: 8459 switch (flt_eval_method) 8460 { 8461 case 1: 8462 if (TYPE_MODE (type) == TYPE_MODE (float_type_node)) 8463 return double_type_node; 8464 break; 8465 case 2: 8466 if (TYPE_MODE (type) == TYPE_MODE (float_type_node) 8467 || TYPE_MODE (type) == TYPE_MODE (double_type_node)) 8468 return long_double_type_node; 8469 break; 8470 default: 8471 gcc_unreachable (); 8472 } 8473 break; 8474 case COMPLEX_TYPE: 8475 if (TREE_CODE (TREE_TYPE (type)) != REAL_TYPE) 8476 return NULL_TREE; 8477 switch (flt_eval_method) 8478 { 8479 case 1: 8480 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node)) 8481 return complex_double_type_node; 8482 break; 8483 case 2: 8484 if (TYPE_MODE (TREE_TYPE (type)) == TYPE_MODE (float_type_node) 8485 || (TYPE_MODE (TREE_TYPE (type)) 8486 == TYPE_MODE (double_type_node))) 8487 return complex_long_double_type_node; 8488 break; 8489 default: 8490 gcc_unreachable (); 8491 } 8492 break; 8493 default: 8494 break; 8495 } 8496 } 8497 return NULL_TREE; 8498} 8499 8500/* Return OP, stripped of any conversions to wider types as much as is safe. 8501 Converting the value back to OP's type makes a value equivalent to OP. 8502 8503 If FOR_TYPE is nonzero, we return a value which, if converted to 8504 type FOR_TYPE, would be equivalent to converting OP to type FOR_TYPE. 8505 8506 OP must have integer, real or enumeral type. Pointers are not allowed! 8507 8508 There are some cases where the obvious value we could return 8509 would regenerate to OP if converted to OP's type, 8510 but would not extend like OP to wider types. 8511 If FOR_TYPE indicates such extension is contemplated, we eschew such values. 
8512 For example, if OP is (unsigned short)(signed char)-1, 8513 we avoid returning (signed char)-1 if FOR_TYPE is int, 8514 even though extending that to an unsigned short would regenerate OP, 8515 since the result of extending (signed char)-1 to (int) 8516 is different from (int) OP. */ 8517 8518tree 8519get_unwidened (tree op, tree for_type) 8520{ 8521 /* Set UNS initially if converting OP to FOR_TYPE is a zero-extension. */ 8522 tree type = TREE_TYPE (op); 8523 unsigned final_prec 8524 = TYPE_PRECISION (for_type != 0 ? for_type : type); 8525 int uns 8526 = (for_type != 0 && for_type != type 8527 && final_prec > TYPE_PRECISION (type) 8528 && TYPE_UNSIGNED (type)); 8529 tree win = op; 8530 8531 while (CONVERT_EXPR_P (op)) 8532 { 8533 int bitschange; 8534 8535 /* TYPE_PRECISION on vector types has different meaning 8536 (TYPE_VECTOR_SUBPARTS) and casts from vectors are view conversions, 8537 so avoid them here. */ 8538 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op, 0))) == VECTOR_TYPE) 8539 break; 8540 8541 bitschange = TYPE_PRECISION (TREE_TYPE (op)) 8542 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0))); 8543 8544 /* Truncations are many-one so cannot be removed. 8545 Unless we are later going to truncate down even farther. */ 8546 if (bitschange < 0 8547 && final_prec > TYPE_PRECISION (TREE_TYPE (op))) 8548 break; 8549 8550 /* See what's inside this conversion. If we decide to strip it, 8551 we will set WIN. */ 8552 op = TREE_OPERAND (op, 0); 8553 8554 /* If we have not stripped any zero-extensions (uns is 0), 8555 we can strip any kind of extension. 8556 If we have previously stripped a zero-extension, 8557 only zero-extensions can safely be stripped. 8558 Any extension can be stripped if the bits it would produce 8559 are all going to be discarded later by truncating to FOR_TYPE. */ 8560 8561 if (bitschange > 0) 8562 { 8563 if (! uns || final_prec <= TYPE_PRECISION (TREE_TYPE (op))) 8564 win = op; 8565 /* TYPE_UNSIGNED says whether this is a zero-extension. 
8566 Let's avoid computing it if it does not affect WIN 8567 and if UNS will not be needed again. */ 8568 if ((uns 8569 || CONVERT_EXPR_P (op)) 8570 && TYPE_UNSIGNED (TREE_TYPE (op))) 8571 { 8572 uns = 1; 8573 win = op; 8574 } 8575 } 8576 } 8577 8578 /* If we finally reach a constant see if it fits in for_type and 8579 in that case convert it. */ 8580 if (for_type 8581 && TREE_CODE (win) == INTEGER_CST 8582 && TREE_TYPE (win) != for_type 8583 && int_fits_type_p (win, for_type)) 8584 win = fold_convert (for_type, win); 8585 8586 return win; 8587} 8588 8589/* Return OP or a simpler expression for a narrower value 8590 which can be sign-extended or zero-extended to give back OP. 8591 Store in *UNSIGNEDP_PTR either 1 if the value should be zero-extended 8592 or 0 if the value should be sign-extended. */ 8593 8594tree 8595get_narrower (tree op, int *unsignedp_ptr) 8596{ 8597 int uns = 0; 8598 int first = 1; 8599 tree win = op; 8600 bool integral_p = INTEGRAL_TYPE_P (TREE_TYPE (op)); 8601 8602 while (TREE_CODE (op) == NOP_EXPR) 8603 { 8604 int bitschange 8605 = (TYPE_PRECISION (TREE_TYPE (op)) 8606 - TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op, 0)))); 8607 8608 /* Truncations are many-one so cannot be removed. */ 8609 if (bitschange < 0) 8610 break; 8611 8612 /* See what's inside this conversion. If we decide to strip it, 8613 we will set WIN. */ 8614 8615 if (bitschange > 0) 8616 { 8617 op = TREE_OPERAND (op, 0); 8618 /* An extension: the outermost one can be stripped, 8619 but remember whether it is zero or sign extension. */ 8620 if (first) 8621 uns = TYPE_UNSIGNED (TREE_TYPE (op)); 8622 /* Otherwise, if a sign extension has been stripped, 8623 only sign extensions can now be stripped; 8624 if a zero extension has been stripped, only zero-extensions. 
*/ 8625 else if (uns != TYPE_UNSIGNED (TREE_TYPE (op))) 8626 break; 8627 first = 0; 8628 } 8629 else /* bitschange == 0 */ 8630 { 8631 /* A change in nominal type can always be stripped, but we must 8632 preserve the unsignedness. */ 8633 if (first) 8634 uns = TYPE_UNSIGNED (TREE_TYPE (op)); 8635 first = 0; 8636 op = TREE_OPERAND (op, 0); 8637 /* Keep trying to narrow, but don't assign op to win if it 8638 would turn an integral type into something else. */ 8639 if (INTEGRAL_TYPE_P (TREE_TYPE (op)) != integral_p) 8640 continue; 8641 } 8642 8643 win = op; 8644 } 8645 8646 if (TREE_CODE (op) == COMPONENT_REF 8647 /* Since type_for_size always gives an integer type. */ 8648 && TREE_CODE (TREE_TYPE (op)) != REAL_TYPE 8649 && TREE_CODE (TREE_TYPE (op)) != FIXED_POINT_TYPE 8650 /* Ensure field is laid out already. */ 8651 && DECL_SIZE (TREE_OPERAND (op, 1)) != 0 8652 && tree_fits_uhwi_p (DECL_SIZE (TREE_OPERAND (op, 1)))) 8653 { 8654 unsigned HOST_WIDE_INT innerprec 8655 = tree_to_uhwi (DECL_SIZE (TREE_OPERAND (op, 1))); 8656 int unsignedp = (DECL_UNSIGNED (TREE_OPERAND (op, 1)) 8657 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (op, 1)))); 8658 tree type = lang_hooks.types.type_for_size (innerprec, unsignedp); 8659 8660 /* We can get this structure field in a narrower type that fits it, 8661 but the resulting extension to its nominal type (a fullword type) 8662 must satisfy the same conditions as for other extensions. 8663 8664 Do this only for fields that are aligned (not bit-fields), 8665 because when bit-field insns will be used there is no 8666 advantage in doing this. */ 8667 8668 if (innerprec < TYPE_PRECISION (TREE_TYPE (op)) 8669 && ! 
DECL_BIT_FIELD (TREE_OPERAND (op, 1)) 8670 && (first || uns == DECL_UNSIGNED (TREE_OPERAND (op, 1))) 8671 && type != 0) 8672 { 8673 if (first) 8674 uns = DECL_UNSIGNED (TREE_OPERAND (op, 1)); 8675 win = fold_convert (type, op); 8676 } 8677 } 8678 8679 *unsignedp_ptr = uns; 8680 return win; 8681} 8682 8683/* Returns true if integer constant C has a value that is permissible 8684 for type TYPE (an INTEGER_TYPE). */ 8685 8686bool 8687int_fits_type_p (const_tree c, const_tree type) 8688{ 8689 tree type_low_bound, type_high_bound; 8690 bool ok_for_low_bound, ok_for_high_bound; 8691 signop sgn_c = TYPE_SIGN (TREE_TYPE (c)); 8692 8693retry: 8694 type_low_bound = TYPE_MIN_VALUE (type); 8695 type_high_bound = TYPE_MAX_VALUE (type); 8696 8697 /* If at least one bound of the type is a constant integer, we can check 8698 ourselves and maybe make a decision. If no such decision is possible, but 8699 this type is a subtype, try checking against that. Otherwise, use 8700 fits_to_tree_p, which checks against the precision. 8701 8702 Compute the status for each possibly constant bound, and return if we see 8703 one does not match. Use ok_for_xxx_bound for this purpose, assigning -1 8704 for "unknown if constant fits", 0 for "constant known *not* to fit" and 1 8705 for "constant known to fit". */ 8706 8707 /* Check if c >= type_low_bound. */ 8708 if (type_low_bound && TREE_CODE (type_low_bound) == INTEGER_CST) 8709 { 8710 if (tree_int_cst_lt (c, type_low_bound)) 8711 return false; 8712 ok_for_low_bound = true; 8713 } 8714 else 8715 ok_for_low_bound = false; 8716 8717 /* Check if c <= type_high_bound. */ 8718 if (type_high_bound && TREE_CODE (type_high_bound) == INTEGER_CST) 8719 { 8720 if (tree_int_cst_lt (type_high_bound, c)) 8721 return false; 8722 ok_for_high_bound = true; 8723 } 8724 else 8725 ok_for_high_bound = false; 8726 8727 /* If the constant fits both bounds, the result is known. 
*/ 8728 if (ok_for_low_bound && ok_for_high_bound) 8729 return true; 8730 8731 /* Perform some generic filtering which may allow making a decision 8732 even if the bounds are not constant. First, negative integers 8733 never fit in unsigned types, */ 8734 if (TYPE_UNSIGNED (type) && sgn_c == SIGNED && wi::neg_p (c)) 8735 return false; 8736 8737 /* Second, narrower types always fit in wider ones. */ 8738 if (TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (c))) 8739 return true; 8740 8741 /* Third, unsigned integers with top bit set never fit signed types. */ 8742 if (!TYPE_UNSIGNED (type) && sgn_c == UNSIGNED) 8743 { 8744 int prec = GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (c))) - 1; 8745 if (prec < TYPE_PRECISION (TREE_TYPE (c))) 8746 { 8747 /* When a tree_cst is converted to a wide-int, the precision 8748 is taken from the type. However, if the precision of the 8749 mode underneath the type is smaller than that, it is 8750 possible that the value will not fit. The test below 8751 fails if any bit is set between the sign bit of the 8752 underlying mode and the top bit of the type. */ 8753 if (wi::ne_p (wi::zext (c, prec - 1), c)) 8754 return false; 8755 } 8756 else if (wi::neg_p (c)) 8757 return false; 8758 } 8759 8760 /* If we haven't been able to decide at this point, there nothing more we 8761 can check ourselves here. Look at the base type if we have one and it 8762 has the same precision. */ 8763 if (TREE_CODE (type) == INTEGER_TYPE 8764 && TREE_TYPE (type) != 0 8765 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (type))) 8766 { 8767 type = TREE_TYPE (type); 8768 goto retry; 8769 } 8770 8771 /* Or to fits_to_tree_p, if nothing else. */ 8772 return wi::fits_to_tree_p (c, type); 8773} 8774 8775/* Stores bounds of an integer TYPE in MIN and MAX. 
If TYPE has non-constant 8776 bounds or is a POINTER_TYPE, the maximum and/or minimum values that can be 8777 represented (assuming two's-complement arithmetic) within the bit 8778 precision of the type are returned instead. */ 8779 8780void 8781get_type_static_bounds (const_tree type, mpz_t min, mpz_t max) 8782{ 8783 if (!POINTER_TYPE_P (type) && TYPE_MIN_VALUE (type) 8784 && TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST) 8785 wi::to_mpz (TYPE_MIN_VALUE (type), min, TYPE_SIGN (type)); 8786 else 8787 { 8788 if (TYPE_UNSIGNED (type)) 8789 mpz_set_ui (min, 0); 8790 else 8791 { 8792 wide_int mn = wi::min_value (TYPE_PRECISION (type), SIGNED); 8793 wi::to_mpz (mn, min, SIGNED); 8794 } 8795 } 8796 8797 if (!POINTER_TYPE_P (type) && TYPE_MAX_VALUE (type) 8798 && TREE_CODE (TYPE_MAX_VALUE (type)) == INTEGER_CST) 8799 wi::to_mpz (TYPE_MAX_VALUE (type), max, TYPE_SIGN (type)); 8800 else 8801 { 8802 wide_int mn = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type)); 8803 wi::to_mpz (mn, max, TYPE_SIGN (type)); 8804 } 8805} 8806 8807/* Return true if VAR is an automatic variable defined in function FN. */ 8808 8809bool 8810auto_var_in_fn_p (const_tree var, const_tree fn) 8811{ 8812 return (DECL_P (var) && DECL_CONTEXT (var) == fn 8813 && ((((TREE_CODE (var) == VAR_DECL && ! DECL_EXTERNAL (var)) 8814 || TREE_CODE (var) == PARM_DECL) 8815 && ! TREE_STATIC (var)) 8816 || TREE_CODE (var) == LABEL_DECL 8817 || TREE_CODE (var) == RESULT_DECL)); 8818} 8819 8820/* Subprogram of following function. Called by walk_tree. 8821 8822 Return *TP if it is an automatic variable or parameter of the 8823 function passed in as DATA. 
 */

static tree
find_var_from_fn (tree *tp, int *walk_subtrees, void *data)
{
  tree fn = (tree) data;

  /* Do not descend into types.  */
  if (TYPE_P (*tp))
    *walk_subtrees = 0;

  /* Report the first automatic variable or parameter that belongs
     to FN.  */
  else if (DECL_P (*tp)
	   && auto_var_in_fn_p (*tp, fn))
    return *tp;

  return NULL_TREE;
}

/* Returns true if T is, contains, or refers to a type with variable
   size.  For METHOD_TYPEs and FUNCTION_TYPEs we exclude the
   arguments, but not the return type.  If FN is nonzero, only return
   true if a modifier of the type or position of FN is a variable or
   parameter inside FN.

   This concept is more general than that of C99 'variably modified types':
   in C99, a struct type is never variably modified because a VLA may not
   appear as a structure member.  However, in GNU C code like:

     struct S { int i[f()]; };

   is valid, and other languages may define similar constructs.  */

bool
variably_modified_type_p (tree type, tree fn)
{
  tree t;

/* Test if T is either variable (if FN is zero) or an expression containing
   a variable in FN.  If TYPE isn't gimplified, return true also if
   gimplify_one_sizepos would gimplify the expression into a local
   variable.  */
#define RETURN_TRUE_IF_VAR(T)						\
  do { tree _t = (T);							\
    if (_t != NULL_TREE							\
	&& _t != error_mark_node					\
	&& TREE_CODE (_t) != INTEGER_CST				\
	&& TREE_CODE (_t) != PLACEHOLDER_EXPR				\
	&& (!fn								\
	    || (!TYPE_SIZES_GIMPLIFIED (type)				\
		&& !is_gimple_sizepos (_t))				\
	    || walk_tree (&_t, find_var_from_fn, fn, NULL)))		\
      return true;  } while (0)

  if (type == error_mark_node)
    return false;

  /* If TYPE itself has variable size, it is variably modified.  */
  RETURN_TRUE_IF_VAR (TYPE_SIZE (type));
  RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (type));

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* A type referring to a variably modified type is itself
	 variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case FUNCTION_TYPE:
    case METHOD_TYPE:
      /* If TYPE is a function type, it is variably modified if the
	 return type is variably modified.  */
      if (variably_modified_type_p (TREE_TYPE (type), fn))
	return true;
      break;

    case INTEGER_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Scalar types are variably modified if their end points
	 aren't constant.  */
      RETURN_TRUE_IF_VAR (TYPE_MIN_VALUE (type));
      RETURN_TRUE_IF_VAR (TYPE_MAX_VALUE (type));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* We can't see if any of the fields are variably-modified by the
	 definition we normally use, since that would produce infinite
	 recursion via pointers.  */
      /* This is variably modified if some field's type is.  */
      for (t = TYPE_FIELDS (type); t; t = DECL_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  {
	    RETURN_TRUE_IF_VAR (DECL_FIELD_OFFSET (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE (t));
	    RETURN_TRUE_IF_VAR (DECL_SIZE_UNIT (t));

	    if (TREE_CODE (type) == QUAL_UNION_TYPE)
	      RETURN_TRUE_IF_VAR (DECL_QUALIFIER (t));
	  }
      break;

    case ARRAY_TYPE:
      /* Do not call ourselves to avoid infinite recursion.  This is
	 variably modified if the element type is.  */
      RETURN_TRUE_IF_VAR (TYPE_SIZE (TREE_TYPE (type)));
      RETURN_TRUE_IF_VAR (TYPE_SIZE_UNIT (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* The current language may have other cases to check, but in general,
     all other types are not variably modified.  */
  return lang_hooks.tree_inlining.var_mod_type_p (type, fn);

#undef RETURN_TRUE_IF_VAR
}

/* Given a DECL or TYPE, return the scope in which it was declared, or
   NULL_TREE if there is no containing scope.  */

tree
get_containing_scope (const_tree t)
{
  return (TYPE_P (t) ? TYPE_CONTEXT (t) : DECL_CONTEXT (t));
}

/* Return the innermost context enclosing DECL that is
   a FUNCTION_DECL, or zero if none.  */

tree
decl_function_context (const_tree decl)
{
  tree context;

  if (TREE_CODE (decl) == ERROR_MARK)
    return 0;

  /* C++ virtual functions use DECL_CONTEXT for the class of the vtable
     where we look up the function at runtime.  Such functions always take
     a first argument of type 'pointer to real context'.

     C++ should really be fixed to use DECL_CONTEXT for the real context,
     and use something else for the "virtual context".  */
  else if (TREE_CODE (decl) == FUNCTION_DECL && DECL_VINDEX (decl))
    context
      = TYPE_MAIN_VARIANT
	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (decl)))));
  else
    context = DECL_CONTEXT (decl);

  /* Climb the scope chain until a FUNCTION_DECL (or nothing) is found.  */
  while (context && TREE_CODE (context) != FUNCTION_DECL)
    {
      if (TREE_CODE (context) == BLOCK)
	context = BLOCK_SUPERCONTEXT (context);
      else
	context = get_containing_scope (context);
    }

  return context;
}

/* Return the innermost context enclosing DECL that is
   a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE, or zero if none.
   TYPE_DECLs and FUNCTION_DECLs are transparent to this function.
 */

tree
decl_type_context (const_tree decl)
{
  tree context = DECL_CONTEXT (decl);

  while (context)
    switch (TREE_CODE (context))
      {
      /* Reaching a namespace or the translation unit means there is
	 no enclosing aggregate type.  */
      case NAMESPACE_DECL:
      case TRANSLATION_UNIT_DECL:
	return NULL_TREE;

      case RECORD_TYPE:
      case UNION_TYPE:
      case QUAL_UNION_TYPE:
	return context;

      /* TYPE_DECLs and FUNCTION_DECLs are transparent: keep climbing.  */
      case TYPE_DECL:
      case FUNCTION_DECL:
	context = DECL_CONTEXT (context);
	break;

      case BLOCK:
	context = BLOCK_SUPERCONTEXT (context);
	break;

      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}

/* CALL is a CALL_EXPR.  Return the declaration for the function
   called, or NULL_TREE if the called function cannot be
   determined.  */

tree
get_callee_fndecl (const_tree call)
{
  tree addr;

  if (call == error_mark_node)
    return error_mark_node;

  /* It's invalid to call this function with anything but a
     CALL_EXPR.  */
  gcc_assert (TREE_CODE (call) == CALL_EXPR);

  /* The first operand to the CALL is the address of the function
     called.  */
  addr = CALL_EXPR_FN (call);

  /* If there is no function, return early.  */
  if (addr == NULL_TREE)
    return NULL_TREE;

  STRIP_NOPS (addr);

  /* If this is a readonly function pointer, extract its initial value.  */
  if (DECL_P (addr) && TREE_CODE (addr) != FUNCTION_DECL
      && TREE_READONLY (addr) && ! TREE_THIS_VOLATILE (addr)
      && DECL_INITIAL (addr))
    addr = DECL_INITIAL (addr);

  /* If the address is just `&f' for some function `f', then we know
     that `f' is being called.  */
  if (TREE_CODE (addr) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (addr, 0)) == FUNCTION_DECL)
    return TREE_OPERAND (addr, 0);

  /* We couldn't figure out what was being called.  */
  return NULL_TREE;
}

/* Print debugging information about tree nodes generated during the compile,
   and any language-specific information.  */

void
dump_tree_statistics (void)
{
  if (GATHER_STATISTICS)
    {
      int i;
      int total_nodes, total_bytes;
      fprintf (stderr, "Kind Nodes Bytes\n");
      fprintf (stderr, "---------------------------------------\n");
      total_nodes = total_bytes = 0;
      for (i = 0; i < (int) all_kinds; i++)
	{
	  fprintf (stderr, "%-20s %7d %10d\n", tree_node_kind_names[i],
		   tree_node_counts[i], tree_node_sizes[i]);
	  total_nodes += tree_node_counts[i];
	  total_bytes += tree_node_sizes[i];
	}
      fprintf (stderr, "---------------------------------------\n");
      fprintf (stderr, "%-20s %7d %10d\n", "Total", total_nodes, total_bytes);
      fprintf (stderr, "---------------------------------------\n");
      fprintf (stderr, "Code Nodes\n");
      fprintf (stderr, "----------------------------\n");
      for (i = 0; i < (int) MAX_TREE_CODES; i++)
	fprintf (stderr, "%-20s %7d\n", get_tree_code_name ((enum tree_code) i),
		 tree_code_counts[i]);
      fprintf (stderr, "----------------------------\n");
      ssanames_print_statistics ();
      phinodes_print_statistics ();
    }
  else
    fprintf (stderr, "(No per-node statistics)\n");

  print_type_hash_statistics ();
  print_debug_expr_statistics ();
  print_value_expr_statistics ();
  lang_hooks.print_statistics ();
}

#define FILE_FUNCTION_FORMAT "_GLOBAL__%s_%s"

/* Generate a crc32 of the low BITS bits of VALUE.  */

static unsigned
crc32_unsigned_bits (unsigned chksum, unsigned value, unsigned bits)
{
  unsigned ix;

  for (ix = bits; ix--; value <<= 1)
    {
      unsigned feedback;

      /* 0x04c11db7 is the standard CRC-32 generator polynomial.  */
      feedback = (value ^ chksum) & 0x80000000 ? 0x04c11db7 : 0;
      chksum <<= 1;
      chksum ^= feedback;
    }
  return chksum;
}

/* Generate a crc32 of a 32-bit unsigned.  */

unsigned
crc32_unsigned (unsigned chksum, unsigned value)
{
  return crc32_unsigned_bits (chksum, value, 32);
}

/* Generate a crc32 of a byte.  */

unsigned
crc32_byte (unsigned chksum, char byte)
{
  return crc32_unsigned_bits (chksum, (unsigned) byte << 24, 8);
}

/* Generate a crc32 of a string.  Note that the do/while loop also
   folds the terminating NUL byte into the checksum.  */

unsigned
crc32_string (unsigned chksum, const char *string)
{
  do
    {
      chksum = crc32_byte (chksum, *string);
    }
  while (*string++);
  return chksum;
}

/* P is a string that will be used in a symbol.  Mask out any characters
   that are not valid in that context.  */

void
clean_symbol_name (char *p)
{
  for (; *p; p++)
    if (! (ISALNUM (*p)
#ifndef NO_DOLLAR_IN_LABEL	/* this for `$'; unlikely, but... -- kr */
	   || *p == '$'
#endif
#ifndef NO_DOT_IN_LABEL		/* this for `.'; unlikely, but...  */
	   || *p == '.'
#endif
	   ))
      *p = '_';
}

/* Generate a name for a special-purpose function.
   The generated name may need to be unique across the whole link.
   Changes to this function may also require corresponding changes to
   xstrdup_mask_random.
   TYPE is some string to identify the purpose of this function to the
   linker or collect2; it must start with an uppercase letter,
   one of:
   I - for constructors
   D - for destructors
   N - for C++ anonymous namespaces
   F - for DWARF unwind frame information.  */

tree
get_file_function_name (const char *type)
{
  char *buf;
  const char *p;
  char *q;

  /* If we already have a name we know to be unique, just use that.
 */
  if (first_global_object_name)
    p = q = ASTRDUP (first_global_object_name);
  /* If the target is handling the constructors/destructors, they
     will be local to this file and the name is only necessary for
     debugging purposes.
     We also assign sub_I and sub_D suffixes to constructors called from
     the global static constructors.  These are always local.  */
  else if (((type[0] == 'I' || type[0] == 'D') && targetm.have_ctors_dtors)
	   || (strncmp (type, "sub_", 4) == 0
	       && (type[4] == 'I' || type[4] == 'D')))
    {
      const char *file = main_input_filename;
      if (! file)
	file = LOCATION_FILE (input_location);
      /* Just use the file's basename, because the full pathname
	 might be quite long.  */
      p = q = ASTRDUP (lbasename (file));
    }
  else
    {
      /* Otherwise, the name must be unique across the entire link.
	 We don't have anything that we know to be unique to this translation
	 unit, so use what we do have and throw in some randomness.  */
      unsigned len;
      const char *name = weak_global_object_name;
      const char *file = main_input_filename;

      if (! name)
	name = "";
      if (! file)
	file = LOCATION_FILE (input_location);

      len = strlen (file);
      /* 9 covers "_%08X_"; 17 covers the random seed printed in hex
	 plus the trailing NUL — NOTE(review): assumes the seed prints
	 in at most 16 hex digits; confirm for HOST_WIDE_INT width.  */
      q = (char *) alloca (9 + 17 + len + 1);
      memcpy (q, file, len + 1);

      snprintf (q + len, 9 + 17 + 1, "_%08X_" HOST_WIDE_INT_PRINT_HEX,
		crc32_string (0, name), get_random_seed (false));

      p = q;
    }

  clean_symbol_name (q);
  buf = (char *) alloca (sizeof (FILE_FUNCTION_FORMAT) + strlen (p)
			 + strlen (type));

  /* Set up the name of the file-level functions we may need.
     Use a global object (which is already required to be unique over
     the program) rather than the file name (which imposes extra
     constraints).  */
  sprintf (buf, FILE_FUNCTION_FORMAT, type, p);

  return get_identifier (buf);
}

#if defined ENABLE_TREE_CHECKING && (GCC_VERSION >= 2007)

/* Complain that the tree code of NODE does not match the expected 0
   terminated list of trailing codes.  The trailing code list can be
   empty, for a more vague error message.  FILE, LINE, and FUNCTION
   are of the caller.  */

void
tree_check_failed (const_tree node, const char *file,
		   int line, const char *function, ...)
{
  va_list args;
  const char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass over the varargs: measure the buffer needed for the
     "expected ..." message.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  if (length)
    {
      char *tmp;
      /* Second pass: build the message.  */
      va_start (args, function);
      length += strlen ("expected ");
      buffer = tmp = (char *) alloca (length);
      length = 0;
      while ((code = (enum tree_code) va_arg (args, int)))
	{
	  const char *prefix = length ? " or " : "expected ";

	  strcpy (tmp + length, prefix);
	  length += strlen (prefix);
	  strcpy (tmp + length, get_tree_code_name (code));
	  length += strlen (get_tree_code_name (code));
	}
      va_end (args);
    }
  else
    buffer = "unexpected node";

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}

/* Complain that the tree code of NODE does match the expected 0
   terminated list of trailing codes.  FILE, LINE, and FUNCTION are of
   the caller.  */

void
tree_not_check_failed (const_tree node, const char *file,
		       int line, const char *function, ...)
{
  va_list args;
  char *buffer;
  unsigned length = 0;
  enum tree_code code;

  /* First pass: compute the required buffer size.  */
  va_start (args, function);
  while ((code = (enum tree_code) va_arg (args, int)))
    length += 4 + strlen (get_tree_code_name (code));
  va_end (args);
  /* Second pass: build the " or "-separated list of code names.  */
  va_start (args, function);
  buffer = (char *) alloca (length);
  length = 0;
  while ((code = (enum tree_code) va_arg (args, int)))
    {
      if (length)
	{
	  strcpy (buffer + length, " or ");
	  length += 4;
	}
      strcpy (buffer + length, get_tree_code_name (code));
      length += strlen (get_tree_code_name (code));
    }
  va_end (args);

  internal_error ("tree check: expected none of %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}

/* Similar to tree_check_failed, except that we check for a class of tree
   code, given in CL.  */

void
tree_class_check_failed (const_tree node, const enum tree_code_class cl,
			 const char *file, int line, const char *function)
{
  internal_error
    ("tree check: expected class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}

/* Similar to tree_check_failed, except that instead of specifying a
   dozen codes, use the knowledge that they're all sequential.
 */

void
tree_range_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum tree_code c1,
			 enum tree_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* First pass over the code range [C1, C2]: compute the buffer size.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (get_tree_code_name ((enum tree_code) c));

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Second pass: build the "expected A or B or ..." message.  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, get_tree_code_name ((enum tree_code) c));
      length += strlen (get_tree_code_name ((enum tree_code) c));
    }

  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}


/* Similar to tree_check_failed, except that we check that a tree does
   not belong to the specified class, given in CL.  */

void
tree_not_class_check_failed (const_tree node, const enum tree_code_class cl,
			     const char *file, int line, const char *function)
{
  internal_error
    ("tree check: did not expect class %qs, have %qs (%s) in %s, at %s:%d",
     TREE_CODE_CLASS_STRING (cl),
     TREE_CODE_CLASS_STRING (TREE_CODE_CLASS (TREE_CODE (node))),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}


/* Similar to tree_check_failed but applied to OMP_CLAUSE codes.  */

void
omp_clause_check_failed (const_tree node, const char *file, int line,
			 const char *function, enum omp_clause_code code)
{
  internal_error ("tree check: expected omp_clause %s, have %s in %s, at %s:%d",
		  omp_clause_code_name[code], get_tree_code_name (TREE_CODE (node)),
		  function, trim_filename (file), line);
}


/* Similar to tree_range_check_failed but applied to OMP_CLAUSE codes.  */

void
omp_clause_range_check_failed (const_tree node, const char *file, int line,
			       const char *function, enum omp_clause_code c1,
			       enum omp_clause_code c2)
{
  char *buffer;
  unsigned length = 0;
  unsigned int c;

  /* First pass over the clause-code range: compute the buffer size.  */
  for (c = c1; c <= c2; ++c)
    length += 4 + strlen (omp_clause_code_name[c]);

  length += strlen ("expected ");
  buffer = (char *) alloca (length);
  length = 0;

  /* Second pass: build the message.  */
  for (c = c1; c <= c2; ++c)
    {
      const char *prefix = length ? " or " : "expected ";

      strcpy (buffer + length, prefix);
      length += strlen (prefix);
      strcpy (buffer + length, omp_clause_code_name[c]);
      length += strlen (omp_clause_code_name[c]);
    }

  /* NOTE(review): indexing omp_clause_code_name by TREE_CODE (node)
     looks suspicious — OMP_CLAUSE_CODE (node) seems intended here;
     confirm against callers.  */
  internal_error ("tree check: %s, have %s in %s, at %s:%d",
		  buffer, omp_clause_code_name[TREE_CODE (node)],
		  function, trim_filename (file), line);
}


/* Map each tree_node_structure_enum value to its printable name,
   generated from treestruct.def.  */
#undef DEFTREESTRUCT
#define DEFTREESTRUCT(VAL, NAME) NAME,

static const char *ts_enum_names[] = {
#include "treestruct.def"
};
#undef DEFTREESTRUCT

#define TS_ENUM_NAME(EN) (ts_enum_names[(EN)])

/* Similar to tree_class_check_failed, except that we check for
   whether CODE contains the tree structure identified by EN.  */

void
tree_contains_struct_check_failed (const_tree node,
				   const enum tree_node_structure_enum en,
				   const char *file, int line,
				   const char *function)
{
  internal_error
    ("tree check: expected tree that contains %qs structure, have %qs in %s, at %s:%d",
     TS_ENUM_NAME (en),
     get_tree_code_name (TREE_CODE (node)), function, trim_filename (file), line);
}


/* Similar to above, except that the check is for the bounds of a
   tree_int_cst's (dynamically sized) vector of elements.  */

void
tree_int_cst_elt_check_failed (int idx, int len, const char *file, int line,
			       const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_int_cst with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}

/* Similar to above, except that the check is for the bounds of a TREE_VEC's
   (dynamically sized) vector.  */

void
tree_vec_elt_check_failed (int idx, int len, const char *file, int line,
			   const char *function)
{
  internal_error
    ("tree check: accessed elt %d of tree_vec with %d elts in %s, at %s:%d",
     idx + 1, len, function, trim_filename (file), line);
}

/* Similar to above, except that the check is for the bounds of the operand
   vector of an expression node EXP.  */

void
tree_operand_check_failed (int idx, const_tree exp, const char *file,
			   int line, const char *function)
{
  enum tree_code code = TREE_CODE (exp);
  internal_error
    ("tree check: accessed operand %d of %s with %d operands in %s, at %s:%d",
     idx + 1, get_tree_code_name (code), TREE_OPERAND_LENGTH (exp),
     function, trim_filename (file), line);
}

/* Similar to above, except that the check is for the number of
   operands of an OMP_CLAUSE node.
 */

void
omp_clause_operand_check_failed (int idx, const_tree t, const char *file,
				 int line, const char *function)
{
  internal_error
    ("tree check: accessed operand %d of omp_clause %s with %d operands "
     "in %s, at %s:%d", idx + 1, omp_clause_code_name[OMP_CLAUSE_CODE (t)],
     omp_clause_num_ops [OMP_CLAUSE_CODE (t)], function,
     trim_filename (file), line);
}
#endif /* ENABLE_TREE_CHECKING */

/* Create a new vector type node holding SUBPARTS units of type INNERTYPE,
   and mapped to the machine mode MODE.  Initialize its fields and build
   the information necessary for debugging output.  */

static tree
make_vector_type (tree innertype, int nunits, machine_mode mode)
{
  tree t;
  inchash::hash hstate;

  t = make_node (VECTOR_TYPE);
  TREE_TYPE (t) = TYPE_MAIN_VARIANT (innertype);
  SET_TYPE_VECTOR_SUBPARTS (t, nunits);
  SET_TYPE_MODE (t, mode);

  if (TYPE_STRUCTURAL_EQUALITY_P (innertype))
    SET_TYPE_STRUCTURAL_EQUALITY (t);
  else if (TYPE_CANONICAL (innertype) != innertype
	   || mode != VOIDmode)
    /* Build the canonical form from the canonical inner type; VOIDmode
       here lets layout choose the mode for the canonical variant.  */
    TYPE_CANONICAL (t)
      = make_vector_type (TYPE_CANONICAL (innertype), nunits, VOIDmode);

  layout_type (t);

  /* Hash the identifying fields so equivalent vector types share one
     canonical node.  */
  hstate.add_wide_int (VECTOR_TYPE);
  hstate.add_wide_int (nunits);
  hstate.add_wide_int (mode);
  hstate.add_object (TYPE_HASH (TREE_TYPE (t)));
  t = type_hash_canon (hstate.end (), t);

  /* We have built a main variant, based on the main variant of the
     inner type.  Use it to build the variant we return.  */
  if ((TYPE_ATTRIBUTES (innertype) || TYPE_QUALS (innertype))
      && TREE_TYPE (t) != innertype)
    return build_type_attribute_qual_variant (t,
					      TYPE_ATTRIBUTES (innertype),
					      TYPE_QUALS (innertype));

  return t;
}

/* Create or reuse an integer type node of SIZE bits, signed or unsigned
   per UNSIGNEDP.  The standard C type nodes (and enabled __intN types)
   are reused when their size matches.  */

static tree
make_or_reuse_type (unsigned size, int unsignedp)
{
  int i;

  if (size == INT_TYPE_SIZE)
    return unsignedp ? unsigned_type_node : integer_type_node;
  if (size == CHAR_TYPE_SIZE)
    return unsignedp ? unsigned_char_type_node : signed_char_type_node;
  if (size == SHORT_TYPE_SIZE)
    return unsignedp ? short_unsigned_type_node : short_integer_type_node;
  if (size == LONG_TYPE_SIZE)
    return unsignedp ? long_unsigned_type_node : long_integer_type_node;
  if (size == LONG_LONG_TYPE_SIZE)
    return (unsignedp ? long_long_unsigned_type_node
	    : long_long_integer_type_node);

  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    if (size == int_n_data[i].bitsize
	&& int_n_enabled_p[i])
      return (unsignedp ? int_n_trees[i].unsigned_type
	      : int_n_trees[i].signed_type);

  /* No standard node matches: build a fresh type.  */
  if (unsignedp)
    return make_unsigned_type (size);
  else
    return make_signed_type (size);
}

/* Create or reuse a fract type by SIZE, UNSIGNEDP, and SATP.  */

static tree
make_or_reuse_fract_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_fract_type_node
			 : sat_short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_fract_type_node : sat_fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_fract_type_node
			 : sat_long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_fract_type_node
			 : sat_long_long_fract_type_node;
    }
  else
    {
      if (size == SHORT_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_short_fract_type_node
			 : short_fract_type_node;
      if (size == FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_fract_type_node : fract_type_node;
      if (size == LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_fract_type_node
			 : long_fract_type_node;
      if (size == LONG_LONG_FRACT_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_fract_type_node
			 : long_long_fract_type_node;
    }

  /* No pre-built node matches: create one.  */
  return make_fract_type (size, unsignedp, satp);
}

/* Create or reuse an accum type by SIZE, UNSIGNEDP, and SATP.  */

static tree
make_or_reuse_accum_type (unsigned size, int unsignedp, int satp)
{
  if (satp)
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_short_accum_type_node
			 : sat_short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_accum_type_node : sat_accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_accum_type_node
			 : sat_long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? sat_unsigned_long_long_accum_type_node
			 : sat_long_long_accum_type_node;
    }
  else
    {
      if (size == SHORT_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_short_accum_type_node
			 : short_accum_type_node;
      if (size == ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_accum_type_node : accum_type_node;
      if (size == LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_accum_type_node
			 : long_accum_type_node;
      if (size == LONG_LONG_ACCUM_TYPE_SIZE)
	return unsignedp ? unsigned_long_long_accum_type_node
			 : long_long_accum_type_node;
    }

  /* No pre-built node matches: create one.  */
  return make_accum_type (size, unsignedp, satp);
}


/* Create an atomic variant node for TYPE.  This routine is called
   during initialization of data types to create the 5 basic atomic
   types.  The generic build_variant_type function requires these to
   already be set up in order to function properly, so cannot be
   called from there.  If ALIGN is non-zero, then ensure alignment is
   overridden to this value.  */

static tree
build_atomic_base (tree type, unsigned int align)
{
  tree t;

  /* Make sure it's not already registered.
 */
  if ((t = get_qualified_type (type, TYPE_QUAL_ATOMIC)))
    return t;

  t = build_variant_type_copy (type);
  set_type_quals (t, TYPE_QUAL_ATOMIC);

  if (align)
    TYPE_ALIGN (t) = align;

  return t;
}

/* Create nodes for all integer types (and error_mark_node) using the sizes
   of C datatypes.  SIGNED_CHAR specifies whether char is signed,
   SHORT_DOUBLE specifies whether double should be of the same precision
   as float.  */

void
build_common_tree_nodes (bool signed_char, bool short_double)
{
  int i;

  error_mark_node = make_node (ERROR_MARK);
  TREE_TYPE (error_mark_node) = error_mark_node;

  initialize_sizetypes ();

  /* Define both `signed char' and `unsigned char'.  */
  signed_char_type_node = make_signed_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (signed_char_type_node) = 1;
  unsigned_char_type_node = make_unsigned_type (CHAR_TYPE_SIZE);
  TYPE_STRING_FLAG (unsigned_char_type_node) = 1;

  /* Define `char', which is like either `signed char' or `unsigned char'
     but not the same as either.  */
  char_type_node
    = (signed_char
       ? make_signed_type (CHAR_TYPE_SIZE)
       : make_unsigned_type (CHAR_TYPE_SIZE));
  TYPE_STRING_FLAG (char_type_node) = 1;

  short_integer_type_node = make_signed_type (SHORT_TYPE_SIZE);
  short_unsigned_type_node = make_unsigned_type (SHORT_TYPE_SIZE);
  integer_type_node = make_signed_type (INT_TYPE_SIZE);
  unsigned_type_node = make_unsigned_type (INT_TYPE_SIZE);
  long_integer_type_node = make_signed_type (LONG_TYPE_SIZE);
  long_unsigned_type_node = make_unsigned_type (LONG_TYPE_SIZE);
  long_long_integer_type_node = make_signed_type (LONG_LONG_TYPE_SIZE);
  long_long_unsigned_type_node = make_unsigned_type (LONG_LONG_TYPE_SIZE);

  /* Create the __intN types and register the enabled ones that are
     wider than long long in the integer_types table.  */
  for (i = 0; i < NUM_INT_N_ENTS; i ++)
    {
      int_n_trees[i].signed_type = make_signed_type (int_n_data[i].bitsize);
      int_n_trees[i].unsigned_type = make_unsigned_type (int_n_data[i].bitsize);
      TYPE_SIZE (int_n_trees[i].signed_type) = bitsize_int (int_n_data[i].bitsize);
      TYPE_SIZE (int_n_trees[i].unsigned_type) = bitsize_int (int_n_data[i].bitsize);

      if (int_n_data[i].bitsize > LONG_LONG_TYPE_SIZE
	  && int_n_enabled_p[i])
	{
	  integer_types[itk_intN_0 + i * 2] = int_n_trees[i].signed_type;
	  integer_types[itk_unsigned_intN_0 + i * 2] = int_n_trees[i].unsigned_type;
	}
    }

  /* Define a boolean type.  This type only represents boolean values but
     may be larger than char depending on the value of BOOL_TYPE_SIZE.  */
  boolean_type_node = make_unsigned_type (BOOL_TYPE_SIZE);
  TREE_SET_CODE (boolean_type_node, BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  TYPE_MAX_VALUE (boolean_type_node) = build_int_cst (boolean_type_node, 1);

  /* Define what type to use for size_t.  */
  if (strcmp (SIZE_TYPE, "unsigned int") == 0)
    size_type_node = unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long unsigned int") == 0)
    size_type_node = long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "long long unsigned int") == 0)
    size_type_node = long_long_unsigned_type_node;
  else if (strcmp (SIZE_TYPE, "short unsigned int") == 0)
    size_type_node = short_unsigned_type_node;
  else
    {
      /* SIZE_TYPE names no standard type: it must match one of the
	 enabled __intN unsigned types.  */
      int i;

      size_type_node = NULL_TREE;
      for (i = 0; i < NUM_INT_N_ENTS; i++)
	if (int_n_enabled_p[i])
	  {
	    char name[50];
	    sprintf (name, "__int%d unsigned", int_n_data[i].bitsize);

	    if (strcmp (name, SIZE_TYPE) == 0)
	      {
		size_type_node = int_n_trees[i].unsigned_type;
	      }
	  }
      if (size_type_node == NULL_TREE)
	gcc_unreachable ();
    }

  /* Fill in the rest of the sized types.  Reuse existing type nodes
     when possible.  */
  intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 0);
  intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 0);
  intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 0);
  intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 0);
  intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 0);

  unsigned_intQI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (QImode), 1);
  unsigned_intHI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (HImode), 1);
  unsigned_intSI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (SImode), 1);
  unsigned_intDI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (DImode), 1);
  unsigned_intTI_type_node = make_or_reuse_type (GET_MODE_BITSIZE (TImode), 1);

  /* Don't call build_qualified type for atomics.  That routine does
     special processing for atomics, and until they are initialized
     it's better not to make that call.

     Check to see if there is a target override for atomic types.  */

  atomicQI_type_node = build_atomic_base (unsigned_intQI_type_node,
					targetm.atomic_align_for_mode (QImode));
  atomicHI_type_node = build_atomic_base (unsigned_intHI_type_node,
					targetm.atomic_align_for_mode (HImode));
  atomicSI_type_node = build_atomic_base (unsigned_intSI_type_node,
					targetm.atomic_align_for_mode (SImode));
  atomicDI_type_node = build_atomic_base (unsigned_intDI_type_node,
					targetm.atomic_align_for_mode (DImode));
  atomicTI_type_node = build_atomic_base (unsigned_intTI_type_node,
					targetm.atomic_align_for_mode (TImode));

  access_public_node = get_identifier ("public");
  access_protected_node = get_identifier ("protected");
  access_private_node = get_identifier ("private");

  /* Define these next since types below may use them.  */
  integer_zero_node = build_int_cst (integer_type_node, 0);
  integer_one_node = build_int_cst (integer_type_node, 1);
  integer_three_node = build_int_cst (integer_type_node, 3);
  integer_minus_one_node = build_int_cst (integer_type_node, -1);

  size_zero_node = size_int (0);
  size_one_node = size_int (1);
  bitsize_zero_node = bitsize_int (0);
  bitsize_one_node = bitsize_int (1);
  bitsize_unit_node = bitsize_int (BITS_PER_UNIT);

  boolean_false_node = TYPE_MIN_VALUE (boolean_type_node);
  boolean_true_node = TYPE_MAX_VALUE (boolean_type_node);

  void_type_node = make_node (VOID_TYPE);
  layout_type (void_type_node);

  pointer_bounds_type_node = targetm.chkp_bound_type ();

  /* We are not going to have real types in C with less than byte alignment,
     so we might as well not have any types that claim to have it.  */
  TYPE_ALIGN (void_type_node) = BITS_PER_UNIT;
  TYPE_USER_ALIGN (void_type_node) = 0;

  void_node = make_node (VOID_CST);
  TREE_TYPE (void_node) = void_type_node;

  null_pointer_node = build_int_cst (build_pointer_type (void_type_node), 0);
  layout_type (TREE_TYPE (null_pointer_node));

  ptr_type_node = build_pointer_type (void_type_node);
  const_ptr_type_node
    = build_pointer_type (build_type_variant (void_type_node, 1, 0));
  fileptr_type_node = ptr_type_node;

  pointer_sized_int_node = build_nonstandard_integer_type (POINTER_SIZE, 1);

  float_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (float_type_node) = FLOAT_TYPE_SIZE;
  layout_type (float_type_node);

  double_type_node = make_node (REAL_TYPE);
  if (short_double)
    TYPE_PRECISION (double_type_node) = FLOAT_TYPE_SIZE;
  else
    TYPE_PRECISION (double_type_node) = DOUBLE_TYPE_SIZE;
  layout_type (double_type_node);

  long_double_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (long_double_type_node) = LONG_DOUBLE_TYPE_SIZE;
  layout_type (long_double_type_node);

  float_ptr_type_node = build_pointer_type (float_type_node);
  double_ptr_type_node = build_pointer_type (double_type_node);
  long_double_ptr_type_node = build_pointer_type (long_double_type_node);
  integer_ptr_type_node = build_pointer_type (integer_type_node);

  /* Fixed size integer types.  */
  uint16_type_node = make_or_reuse_type (16, 1);
  uint32_type_node = make_or_reuse_type (32, 1);
  uint64_type_node = make_or_reuse_type (64, 1);

  /* Decimal float types.  */
  dfloat32_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat32_type_node) = DECIMAL32_TYPE_SIZE;
  layout_type (dfloat32_type_node);
  SET_TYPE_MODE (dfloat32_type_node, SDmode);
  dfloat32_ptr_type_node = build_pointer_type (dfloat32_type_node);

  dfloat64_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat64_type_node) = DECIMAL64_TYPE_SIZE;
  layout_type (dfloat64_type_node);
  SET_TYPE_MODE (dfloat64_type_node, DDmode);
  dfloat64_ptr_type_node = build_pointer_type (dfloat64_type_node);

  dfloat128_type_node = make_node (REAL_TYPE);
  TYPE_PRECISION (dfloat128_type_node) = DECIMAL128_TYPE_SIZE;
  layout_type (dfloat128_type_node);
  SET_TYPE_MODE (dfloat128_type_node, TDmode);
  dfloat128_ptr_type_node = build_pointer_type (dfloat128_type_node);

  complex_integer_type_node = build_complex_type (integer_type_node);
  complex_float_type_node = build_complex_type (float_type_node);
  complex_double_type_node = build_complex_type (double_type_node);
  complex_long_double_type_node = build_complex_type (long_double_type_node);

/* Make fixed-point nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_TYPE_NODE(KIND,SIZE) \
  sat_ ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

#define MAKE_FIXED_TYPE_NODE_WIDTH(KIND,WIDTH,SIZE) \
  sat_ ## WIDTH ## KIND ## _type_node = \
    make_sat_signed_ ## KIND ## _type (SIZE); \
  sat_unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_sat_unsigned_ ## KIND ## _type (SIZE); \
  WIDTH ## KIND ## _type_node = make_signed_ ## KIND ## _type (SIZE); \
  unsigned_ ## WIDTH ## KIND ## _type_node = \
    make_unsigned_ ## KIND ## _type (SIZE);

/* Make fixed-point type nodes based on four different widths.  */
#define MAKE_FIXED_TYPE_NODE_FAMILY(N1,N2) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, short_, SHORT_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE (N1, N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_, LONG_ ## N2 ## _TYPE_SIZE) \
  MAKE_FIXED_TYPE_NODE_WIDTH (N1, long_long_, LONG_LONG_ ## N2 ## _TYPE_SIZE)

/* Make fixed-point mode nodes based on sat/non-sat and signed/unsigned.  */
#define MAKE_FIXED_MODE_NODE(KIND,NAME,MODE) \
  NAME ## _type_node = \
    make_or_reuse_signed_ ## KIND ## _type (GET_MODE_BITSIZE (MODE ## mode)); \
  u ## NAME ## _type_node = \
    make_or_reuse_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode)); \
  sat_ ## NAME ## _type_node = \
    make_or_reuse_sat_signed_ ## KIND ## _type \
		(GET_MODE_BITSIZE (MODE ## mode)); \
  sat_u ## NAME ## _type_node = \
    make_or_reuse_sat_unsigned_ ## KIND ## _type \
		(GET_MODE_BITSIZE (U ## MODE ## mode));

  /* Fixed-point type and mode nodes.  */
  MAKE_FIXED_TYPE_NODE_FAMILY (fract, FRACT)
  MAKE_FIXED_TYPE_NODE_FAMILY (accum, ACCUM)
  MAKE_FIXED_MODE_NODE (fract, qq, QQ)
  MAKE_FIXED_MODE_NODE (fract, hq, HQ)
  MAKE_FIXED_MODE_NODE (fract, sq, SQ)
  MAKE_FIXED_MODE_NODE (fract, dq, DQ)
  MAKE_FIXED_MODE_NODE (fract, tq, TQ)
  MAKE_FIXED_MODE_NODE (accum, ha, HA)
  MAKE_FIXED_MODE_NODE (accum, sa, SA)
  MAKE_FIXED_MODE_NODE (accum, da, DA)
  MAKE_FIXED_MODE_NODE (accum, ta, TA)

  {
    tree t = targetm.build_builtin_va_list ();

    /* Many back-ends define record types without setting TYPE_NAME.
       If we copied the record type here, we'd keep the original
       record type without a name.  This breaks name mangling.  So,
       don't copy record types and let c_common_nodes_and_builtins()
       declare the type to be __builtin_va_list.  */
    if (TREE_CODE (t) != RECORD_TYPE)
      t = build_variant_type_copy (t);

    va_list_type_node = t;
  }
}

/* Modify DECL for given flags.
   TM_PURE attribute is set only on types, so the function will modify
   DECL's type when ECF_TM_PURE is used.
*/

void
set_call_expr_flags (tree decl, int flags)
{
  /* Each ECF_* bit is recorded on an existing decl/tree flag or as an
     attribute rather than in dedicated storage; note the non-obvious
     encodings below.  */
  if (flags & ECF_NOTHROW)
    TREE_NOTHROW (decl) = 1;
  /* ECF_CONST is encoded as TREE_READONLY on a function decl.  */
  if (flags & ECF_CONST)
    TREE_READONLY (decl) = 1;
  if (flags & ECF_PURE)
    DECL_PURE_P (decl) = 1;
  if (flags & ECF_LOOPING_CONST_OR_PURE)
    DECL_LOOPING_CONST_OR_PURE_P (decl) = 1;
  if (flags & ECF_NOVOPS)
    DECL_IS_NOVOPS (decl) = 1;
  /* ECF_NORETURN is encoded as TREE_THIS_VOLATILE on a function decl.  */
  if (flags & ECF_NORETURN)
    TREE_THIS_VOLATILE (decl) = 1;
  if (flags & ECF_MALLOC)
    DECL_IS_MALLOC (decl) = 1;
  if (flags & ECF_RETURNS_TWICE)
    DECL_IS_RETURNS_TWICE (decl) = 1;
  /* "leaf" has no dedicated decl bit; record it as an attribute.  */
  if (flags & ECF_LEAF)
    DECL_ATTRIBUTES (decl) = tree_cons (get_identifier ("leaf"),
					NULL, DECL_ATTRIBUTES (decl));
  if ((flags & ECF_TM_PURE) && flag_tm)
    apply_tm_attr (decl, get_identifier ("transaction_pure"));
  /* Looping const or pure is implied by noreturn.
     There is currently no way to declare looping const or looping pure alone.  */
  gcc_assert (!(flags & ECF_LOOPING_CONST_OR_PURE)
	      || ((flags & ECF_NORETURN) && (flags & (ECF_CONST | ECF_PURE))));
}


/* A subroutine of build_common_builtin_nodes.  Define a builtin function.
   NAME is the "__builtin_" name, TYPE its function type, CODE the
   built_in_function enumerator, LIBRARY_NAME the library fallback name,
   and ECF_FLAGS the ECF_* flags to set on the new decl.  */

static void
local_define_builtin (const char *name, tree type, enum built_in_function code,
                      const char *library_name, int ecf_flags)
{
  tree decl;

  decl = add_builtin_function (name, type, code, BUILT_IN_NORMAL,
			       library_name, NULL_TREE);
  set_call_expr_flags (decl, ecf_flags);

  /* Register the decl so builtin_decl_explicit () and friends find it.  */
  set_builtin_decl (code, decl, true);
}

/* Call this function after instantiating all builtins that the language
   front end cares about.  This will build the rest of the builtins
   and internal functions that are relied upon by the tree optimizers and
   the middle-end.
*/

void
build_common_builtin_nodes (void)
{
  tree tmp, ftype;
  int ecf_flags;

  if (!builtin_decl_explicit_p (BUILT_IN_UNREACHABLE))
    {
      ftype = build_function_type (void_type_node, void_list_node);
      local_define_builtin ("__builtin_unreachable", ftype, BUILT_IN_UNREACHABLE,
			    "__builtin_unreachable",
			    ECF_NOTHROW | ECF_LEAF | ECF_NORETURN
			    | ECF_CONST);
    }

  /* Each builtin below is only defined here if the front end has not
     already registered it (e.g. as a recognized library function).  */
  if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY)
      || !builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, const_ptr_type_node,
					size_type_node, NULL_TREE);

      if (!builtin_decl_explicit_p (BUILT_IN_MEMCPY))
	local_define_builtin ("__builtin_memcpy", ftype, BUILT_IN_MEMCPY,
			      "memcpy", ECF_NOTHROW | ECF_LEAF);
      if (!builtin_decl_explicit_p (BUILT_IN_MEMMOVE))
	local_define_builtin ("__builtin_memmove", ftype, BUILT_IN_MEMMOVE,
			      "memmove", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMCMP))
    {
      ftype = build_function_type_list (integer_type_node, const_ptr_type_node,
					const_ptr_type_node, size_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_memcmp", ftype, BUILT_IN_MEMCMP,
			    "memcmp", ECF_PURE | ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_MEMSET))
    {
      ftype = build_function_type_list (ptr_type_node,
					ptr_type_node, integer_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_memset", ftype, BUILT_IN_MEMSET,
			    "memset", ECF_NOTHROW | ECF_LEAF);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_ALLOCA))
    {
      ftype = build_function_type_list (ptr_type_node,
					size_type_node, NULL_TREE);
      local_define_builtin ("__builtin_alloca", ftype, BUILT_IN_ALLOCA,
			    "alloca", ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);
    }

  ftype = build_function_type_list (ptr_type_node, size_type_node,
				    size_type_node, NULL_TREE);
  local_define_builtin ("__builtin_alloca_with_align", ftype,
			BUILT_IN_ALLOCA_WITH_ALIGN, "alloca",
			ECF_MALLOC | ECF_NOTHROW | ECF_LEAF);

  /* If we're checking the stack, `alloca' can throw.  */
  if (flag_stack_check)
    {
      TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA)) = 0;
      TREE_NOTHROW (builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN)) = 0;
    }

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_init_trampoline", ftype,
			BUILT_IN_INIT_TRAMPOLINE,
			"__builtin_init_trampoline", ECF_NOTHROW | ECF_LEAF);
  local_define_builtin ("__builtin_init_heap_trampoline", ftype,
			BUILT_IN_INIT_HEAP_TRAMPOLINE,
			"__builtin_init_heap_trampoline",
			ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_adjust_trampoline", ftype,
			BUILT_IN_ADJUST_TRAMPOLINE,
			"__builtin_adjust_trampoline",
			ECF_CONST | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_nonlocal_goto", ftype,
			BUILT_IN_NONLOCAL_GOTO,
			"__builtin_nonlocal_goto",
			ECF_NORETURN | ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node,
				    ptr_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_setup", ftype,
			BUILT_IN_SETJMP_SETUP,
			"__builtin_setjmp_setup", ECF_NOTHROW);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_setjmp_receiver", ftype,
			BUILT_IN_SETJMP_RECEIVER,
			"__builtin_setjmp_receiver", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_save", ftype, BUILT_IN_STACK_SAVE,
			"__builtin_stack_save", ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_stack_restore", ftype,
			BUILT_IN_STACK_RESTORE,
			"__builtin_stack_restore", ECF_NOTHROW | ECF_LEAF);

  /* If there's a possibility that we might use the ARM EABI, build the
    alternate __cxa_end_cleanup node used to resume from C++ and Java.  */
  if (targetm.arm_eabi_unwinder)
    {
      ftype = build_function_type_list (void_type_node, NULL_TREE);
      local_define_builtin ("__builtin_cxa_end_cleanup", ftype,
			    BUILT_IN_CXA_END_CLEANUP,
			    "__cxa_end_cleanup", ECF_NORETURN | ECF_LEAF);
    }

  /* The library fallback name depends on the unwinding scheme the
     target selected (SjLj vs. table-driven).  */
  ftype = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
  local_define_builtin ("__builtin_unwind_resume", ftype,
			BUILT_IN_UNWIND_RESUME,
			((targetm_common.except_unwind_info (&global_options)
			  == UI_SJLJ)
			 ? "_Unwind_SjLj_Resume" : "_Unwind_Resume"),
			ECF_NORETURN);

  if (builtin_decl_explicit (BUILT_IN_RETURN_ADDRESS) == NULL_TREE)
    {
      ftype = build_function_type_list (ptr_type_node, integer_type_node,
					NULL_TREE);
      local_define_builtin ("__builtin_return_address", ftype,
			    BUILT_IN_RETURN_ADDRESS,
			    "__builtin_return_address",
			    ECF_NOTHROW);
    }

  if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER)
      || !builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
    {
      ftype = build_function_type_list (void_type_node, ptr_type_node,
					ptr_type_node, NULL_TREE);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_ENTER))
	local_define_builtin ("__cyg_profile_func_enter", ftype,
			      BUILT_IN_PROFILE_FUNC_ENTER,
			      "__cyg_profile_func_enter", 0);
      if (!builtin_decl_explicit_p (BUILT_IN_PROFILE_FUNC_EXIT))
	local_define_builtin ("__cyg_profile_func_exit", ftype,
			      BUILT_IN_PROFILE_FUNC_EXIT,
			      "__cyg_profile_func_exit", 0);
    }

  /* The exception object and filter values from the runtime.  The argument
     must be zero before exception lowering, i.e. from the front end.  After
     exception lowering, it will be the region number for the exception
     landing pad.  These functions are PURE instead of CONST to prevent
     them from being hoisted past the exception edge that will initialize
     its value in the landing pad.  */
  ftype = build_function_type_list (ptr_type_node,
				    integer_type_node, NULL_TREE);
  ecf_flags = ECF_PURE | ECF_NOTHROW | ECF_LEAF;
  /* Only use TM_PURE if we have TM language support.  */
  if (builtin_decl_explicit_p (BUILT_IN_TM_LOAD_1))
    ecf_flags |= ECF_TM_PURE;
  local_define_builtin ("__builtin_eh_pointer", ftype, BUILT_IN_EH_POINTER,
			"__builtin_eh_pointer", ecf_flags);

  tmp = lang_hooks.types.type_for_mode (targetm.eh_return_filter_mode (), 0);
  ftype = build_function_type_list (tmp, integer_type_node, NULL_TREE);
  local_define_builtin ("__builtin_eh_filter", ftype, BUILT_IN_EH_FILTER,
			"__builtin_eh_filter", ECF_PURE | ECF_NOTHROW | ECF_LEAF);

  ftype = build_function_type_list (void_type_node,
				    integer_type_node, integer_type_node,
				    NULL_TREE);
  local_define_builtin ("__builtin_eh_copy_values", ftype,
			BUILT_IN_EH_COPY_VALUES,
			"__builtin_eh_copy_values", ECF_NOTHROW);

  /* Complex multiplication and division.  These are handled as builtins
     rather than optabs because emit_library_call_value doesn't support
     complex.  Further, we can do slightly better with folding these
     beasties if the real and complex parts of the arguments are separate.  */
  {
    int mode;

    for (mode = MIN_MODE_COMPLEX_FLOAT; mode <= MAX_MODE_COMPLEX_FLOAT; ++mode)
      {
	char mode_name_buf[4], *q;
	const char *p;
	enum built_in_function mcode, dcode;
	tree type, inner_type;
	const char *prefix = "__";

	if (targetm.libfunc_gnu_prefix)
	  prefix = "__gnu_";

	type = lang_hooks.types.type_for_mode ((machine_mode) mode, 0);
	if (type == NULL)
	  continue;
	inner_type = TREE_TYPE (type);

	ftype = build_function_type_list (type, inner_type, inner_type,
					  inner_type, inner_type, NULL_TREE);

	mcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_MUL_MIN + mode - MIN_MODE_COMPLEX_FLOAT));
	dcode = ((enum built_in_function)
		 (BUILT_IN_COMPLEX_DIV_MIN + mode - MIN_MODE_COMPLEX_FLOAT));

	/* Lower-case the mode name for use in the libfunc name,
	   e.g. "SC" -> "sc" giving "__mulsc3" / "__divsc3".  */
	for (p = GET_MODE_NAME (mode), q = mode_name_buf; *p; p++, q++)
	  *q = TOLOWER (*p);
	*q = '\0';

	built_in_names[mcode] = concat (prefix, "mul", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[mcode], ftype, mcode,
			      built_in_names[mcode],
			      ECF_CONST | ECF_NOTHROW | ECF_LEAF);

	built_in_names[dcode] = concat (prefix, "div", mode_name_buf, "3",
					NULL);
	local_define_builtin (built_in_names[dcode], ftype, dcode,
			      built_in_names[dcode],
			      ECF_CONST | ECF_NOTHROW | ECF_LEAF);
      }
  }

  init_internal_fns ();
}

/* HACK.  GROSS.  This is absolutely disgusting.  I wish there was a
   better way.

   If we requested a pointer to a vector, build up the pointers that
   we stripped off while looking for the inner type.  Similarly for
   return values from functions.

   The argument TYPE is the top of the chain, and BOTTOM is the
   new type which we will point to.
*/

tree
reconstruct_complex_type (tree type, tree bottom)
{
  tree rebuilt_inner, outer;

  /* Recurse through each wrapper level, substituting BOTTOM at the
     innermost position, then rebuild the current level around the
     rebuilt inner type.  */
  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
      rebuilt_inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_pointer_type_for_mode (rebuilt_inner, TYPE_MODE (type),
					   TYPE_REF_CAN_ALIAS_ALL (type));
      break;

    case REFERENCE_TYPE:
      rebuilt_inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_reference_type_for_mode (rebuilt_inner, TYPE_MODE (type),
					     TYPE_REF_CAN_ALIAS_ALL (type));
      break;

    case ARRAY_TYPE:
      rebuilt_inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_array_type (rebuilt_inner, TYPE_DOMAIN (type));
      break;

    case FUNCTION_TYPE:
      rebuilt_inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_function_type (rebuilt_inner, TYPE_ARG_TYPES (type));
      break;

    case METHOD_TYPE:
      rebuilt_inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      /* The build_method_type_directly() routine prepends 'this' to the
	 argument list, so we must compensate by getting rid of it.  */
      outer = build_method_type_directly
	(TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (type))),
	 rebuilt_inner,
	 TREE_CHAIN (TYPE_ARG_TYPES (type)));
      break;

    case OFFSET_TYPE:
      rebuilt_inner = reconstruct_complex_type (TREE_TYPE (type), bottom);
      outer = build_offset_type (TYPE_OFFSET_BASETYPE (type), rebuilt_inner);
      break;

    default:
      /* TYPE is the innermost level itself; replace it outright.  */
      return bottom;
    }

  /* Carry over this level's attributes and qualifiers.  */
  return build_type_attribute_qual_variant (outer, TYPE_ATTRIBUTES (type),
					    TYPE_QUALS (type));
}

/* Returns a vector tree node given a mode (integer, vector, or BLKmode) and
   the inner type.  */
tree
build_vector_type_for_mode (tree innertype, machine_mode mode)
{
  int nunits;

  switch (GET_MODE_CLASS (mode))
    {
    case MODE_VECTOR_INT:
    case MODE_VECTOR_FLOAT:
    case MODE_VECTOR_FRACT:
    case MODE_VECTOR_UFRACT:
    case MODE_VECTOR_ACCUM:
    case MODE_VECTOR_UACCUM:
      nunits = GET_MODE_NUNITS (mode);
      break;

    case MODE_INT:
      /* Check that there are no leftover bits.  */
      gcc_assert (GET_MODE_BITSIZE (mode)
		  % TREE_INT_CST_LOW (TYPE_SIZE (innertype)) == 0);

      nunits = GET_MODE_BITSIZE (mode)
	       / TREE_INT_CST_LOW (TYPE_SIZE (innertype));
      break;

    default:
      gcc_unreachable ();
    }

  return make_vector_type (innertype, nunits, mode);
}

/* Similarly, but takes the inner type and number of units, which must be
   a power of two.  */

tree
build_vector_type (tree innertype, int nunits)
{
  /* VOIDmode lets make_vector_type pick the mode itself.  */
  return make_vector_type (innertype, nunits, VOIDmode);
}

/* Similarly, but builds a variant type with TYPE_VECTOR_OPAQUE set.  */

tree
build_opaque_vector_type (tree innertype, int nunits)
{
  tree t = make_vector_type (innertype, nunits, VOIDmode);
  tree cand;
  /* We always build the non-opaque variant before the opaque one,
     so if it already exists, it is TYPE_NEXT_VARIANT of this one.  */
  cand = TYPE_NEXT_VARIANT (t);
  if (cand
      && TYPE_VECTOR_OPAQUE (cand)
      && check_qualified_type (cand, t, TYPE_QUALS (t)))
    return cand;
  /* Otherwise build a variant type and make sure to queue it after
     the non-opaque type.  */
  cand = build_distinct_type_copy (t);
  TYPE_VECTOR_OPAQUE (cand) = true;
  TYPE_CANONICAL (cand) = TYPE_CANONICAL (t);
  TYPE_NEXT_VARIANT (cand) = TYPE_NEXT_VARIANT (t);
  TYPE_NEXT_VARIANT (t) = cand;
  TYPE_MAIN_VARIANT (cand) = TYPE_MAIN_VARIANT (t);
  return cand;
}


/* Given an initializer INIT, return TRUE if INIT is zero or some
   aggregate of zeros.  Otherwise return FALSE.  */
bool
initializer_zerop (const_tree init)
{
  tree elt;

  STRIP_NOPS (init);

  switch (TREE_CODE (init))
    {
    case INTEGER_CST:
      return integer_zerop (init);

    case REAL_CST:
      /* ??? Note that this is not correct for C4X float formats.  There,
	 a bit pattern of all zeros is 1.0; 0.0 is encoded with the most
	 negative exponent.  */
      return real_zerop (init)
	&& ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (init));

    case FIXED_CST:
      return fixed_zerop (init);

    case COMPLEX_CST:
      /* Both the real and the imaginary part must be (positive) zero.  */
      return integer_zerop (init)
	|| (real_zerop (init)
	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_REALPART (init)))
	    && ! REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (TREE_IMAGPART (init))));

    case VECTOR_CST:
      {
	unsigned i;
	for (i = 0; i < VECTOR_CST_NELTS (init); ++i)
	  if (!initializer_zerop (VECTOR_CST_ELT (init, i)))
	    return false;
	return true;
      }

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;

	/* A clobber marks the value as undefined, not zero.  */
	if (TREE_CLOBBER_P (init))
	  return false;
	FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (init), idx, elt)
	  if (!initializer_zerop (elt))
	    return false;
	return true;
      }

    case STRING_CST:
      {
	int i;

	/* We need to loop through all elements to handle cases like
	   "\0" and "\0foobar".  */
	for (i = 0; i < TREE_STRING_LENGTH (init); ++i)
	  if (TREE_STRING_POINTER (init)[i] != '\0')
	    return false;

	return true;
      }

    default:
      return false;
    }
}

/* Check if vector VEC consists of all the equal elements and
   that the number of elements corresponds to the type of VEC.
   The function returns first element of the vector
   or NULL_TREE if the vector is not uniform.  */
tree
uniform_vector_p (const_tree vec)
{
  tree first, t;
  unsigned i;

  if (vec == NULL_TREE)
    return NULL_TREE;

  gcc_assert (VECTOR_TYPE_P (TREE_TYPE (vec)));

  if (TREE_CODE (vec) == VECTOR_CST)
    {
      /* Compare every element against the first one.  */
      first = VECTOR_CST_ELT (vec, 0);
      for (i = 1; i < VECTOR_CST_NELTS (vec); ++i)
	if (!operand_equal_p (first, VECTOR_CST_ELT (vec, i), 0))
	  return NULL_TREE;

      return first;
    }

  else if (TREE_CODE (vec) == CONSTRUCTOR)
    {
      first = error_mark_node;

      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (vec), i, t)
        {
          if (i == 0)
            {
              first = t;
              continue;
            }
	  if (!operand_equal_p (first, t, 0))
	    return NULL_TREE;
        }
      /* A partial constructor does not cover the whole vector.  */
      if (i != TYPE_VECTOR_SUBPARTS (TREE_TYPE (vec)))
	return NULL_TREE;

      return first;
    }

  return NULL_TREE;
}

/* Build an empty statement at location LOC.  */

tree
build_empty_stmt (location_t loc)
{
  tree t = build1 (NOP_EXPR, void_type_node, size_zero_node);
  SET_EXPR_LOCATION (t, loc);
  return t;
}


/* Build an OpenMP clause with code CODE.  LOC is the location of the
   clause.
*/

tree
build_omp_clause (location_t loc, enum omp_clause_code code)
{
  tree t;
  int size, length;

  /* tree_omp_clause embeds one operand; allocate room for the rest.  */
  length = omp_clause_num_ops[code];
  size = (sizeof (struct tree_omp_clause) + (length - 1) * sizeof (tree));

  record_node_allocation_statistics (OMP_CLAUSE, size);

  t = (tree) ggc_internal_alloc (size);
  memset (t, 0, size);
  TREE_SET_CODE (t, OMP_CLAUSE);
  OMP_CLAUSE_SET_CODE (t, code);
  OMP_CLAUSE_LOCATION (t) = loc;

  return t;
}

/* Build a tcc_vl_exp object with code CODE and room for LEN operands.  LEN
   includes the implicit operand count in TREE_OPERAND 0, and so must be >= 1.
   Except for the CODE and operand count field, other storage for the
   object is initialized to zeros.  */

tree
build_vl_exp_stat (enum tree_code code, int len MEM_STAT_DECL)
{
  tree t;
  /* tree_exp embeds one operand; allocate space for the remaining
     LEN - 1.  */
  int length = (len - 1) * sizeof (tree) + sizeof (struct tree_exp);

  gcc_assert (TREE_CODE_CLASS (code) == tcc_vl_exp);
  gcc_assert (len >= 1);

  record_node_allocation_statistics (code, length);

  t = ggc_alloc_cleared_tree_node_stat (length PASS_MEM_STAT);

  TREE_SET_CODE (t, code);

  /* Can't use TREE_OPERAND to store the length because if checking is
     enabled, it will try to check the length before we store it.  :-P  */
  t->exp.operands[0] = build_int_cst (sizetype, len);

  return t;
}

/* Helper function for build_call_* functions; build a CALL_EXPR with
   indicated RETURN_TYPE, FN, and NARGS, but do not initialize any of
   the argument slots.
*/

static tree
build_call_1 (tree return_type, tree fn, int nargs)
{
  tree t;

  /* Operand 0 holds the implicit operand count, operand 1 the callee,
     operand 2 the static chain; the NARGS arguments start at operand 3
     — hence NARGS + 3.  */
  t = build_vl_exp (CALL_EXPR, nargs + 3);
  TREE_TYPE (t) = return_type;
  CALL_EXPR_FN (t) = fn;
  CALL_EXPR_STATIC_CHAIN (t) = NULL;

  return t;
}

/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   FN and a null static chain slot.  NARGS is the number of call arguments
   which are specified as "..." arguments.  */

tree
build_call_nary (tree return_type, tree fn, int nargs, ...)
{
  tree ret;
  va_list args;
  va_start (args, nargs);
  ret = build_call_valist (return_type, fn, nargs, args);
  va_end (args);
  return ret;
}

/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   FN and a null static chain slot.  NARGS is the number of call arguments
   which are specified as a va_list ARGS.  */

tree
build_call_valist (tree return_type, tree fn, int nargs, va_list args)
{
  tree t;
  int i;

  t = build_call_1 (return_type, fn, nargs);
  for (i = 0; i < nargs; i++)
    CALL_EXPR_ARG (t, i) = va_arg (args, tree);
  process_call_operands (t);
  return t;
}

/* Build a CALL_EXPR of class tcc_vl_exp with the indicated RETURN_TYPE and
   FN and a null static chain slot.  NARGS is the number of call arguments
   which are specified as a tree array ARGS.  */

tree
build_call_array_loc (location_t loc, tree return_type, tree fn,
		      int nargs, const tree *args)
{
  tree t;
  int i;

  t = build_call_1 (return_type, fn, nargs);
  for (i = 0; i < nargs; i++)
    CALL_EXPR_ARG (t, i) = args[i];
  process_call_operands (t);
  SET_EXPR_LOCATION (t, loc);
  return t;
}

/* Like build_call_array, but takes a vec.
*/

tree
build_call_vec (tree return_type, tree fn, vec<tree, va_gc> *args)
{
  tree ret, t;
  unsigned int ix;

  ret = build_call_1 (return_type, fn, vec_safe_length (args));
  FOR_EACH_VEC_SAFE_ELT (args, ix, t)
    CALL_EXPR_ARG (ret, ix) = t;
  process_call_operands (ret);
  return ret;
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and N arguments are passed in the array
   ARGARRAY.  */

tree
build_call_expr_loc_array (location_t loc, tree fndecl, int n, tree *argarray)
{
  tree fntype = TREE_TYPE (fndecl);
  /* The callee operand is the address of FNDECL, not FNDECL itself.  */
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  /* Goes through fold_build_call_array_loc so the result may be folded
     rather than a raw CALL_EXPR.  */
  return fold_build_call_array_loc (loc, TREE_TYPE (fntype), fn, n, argarray);
}

/* Conveniently construct a function call expression.  FNDECL names the
   function to be called and the arguments are passed in the vector
   VEC.  */

tree
build_call_expr_loc_vec (location_t loc, tree fndecl, vec<tree, va_gc> *vec)
{
  return build_call_expr_loc_array (loc, fndecl, vec_safe_length (vec),
				    vec_safe_address (vec));
}


/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  */

tree
build_call_expr_loc (location_t loc, tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (loc, fndecl, n, argarray);
}

/* Like build_call_expr_loc (UNKNOWN_LOCATION, ...).  Duplicated because
   varargs macros aren't supported by all bootstrap compilers.
*/

tree
build_call_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree *argarray = XALLOCAVEC (tree, n);
  int i;

  va_start (ap, n);
  for (i = 0; i < n; i++)
    argarray[i] = va_arg (ap, tree);
  va_end (ap);
  return build_call_expr_loc_array (UNKNOWN_LOCATION, fndecl, n, argarray);
}

/* Build internal call expression.  This is just like CALL_EXPR, except
   its CALL_EXPR_FN is NULL.  It will get gimplified later into ordinary
   internal function.  */

tree
build_call_expr_internal_loc (location_t loc, enum internal_fn ifn,
			      tree type, int n, ...)
{
  va_list ap;
  int i;

  /* Passing NULL_TREE as the callee leaves CALL_EXPR_FN null, marking
     this as an internal-function call.  */
  tree fn = build_call_1 (type, NULL_TREE, n);
  va_start (ap, n);
  for (i = 0; i < n; i++)
    CALL_EXPR_ARG (fn, i) = va_arg (ap, tree);
  va_end (ap);
  SET_EXPR_LOCATION (fn, loc);
  CALL_EXPR_IFN (fn) = ifn;
  return fn;
}

/* Create a new constant string literal and return a char* pointer to it.
   The STRING_CST value is the LEN characters at STR.  */
tree
build_string_literal (int len, const char *str)
{
  tree t, elem, index, type;

  t = build_string (len, str);
  /* Give the STRING_CST type `const char[LEN]'.  */
  elem = build_type_variant (char_type_node, 1, 0);
  index = build_index_type (size_int (len - 1));
  type = build_array_type (elem, index);
  TREE_TYPE (t) = type;
  TREE_CONSTANT (t) = 1;
  TREE_READONLY (t) = 1;
  TREE_STATIC (t) = 1;

  /* Return `&literal[0]' of pointer-to-const-char type.  */
  type = build_pointer_type (elem);
  t = build1 (ADDR_EXPR, type,
	      build4 (ARRAY_REF, elem,
		      t, integer_zero_node, NULL_TREE, NULL_TREE));
  return t;
}



/* Return true if T (assumed to be a DECL) must be assigned a memory
   location.
*/

bool
needs_to_live_in_memory (const_tree t)
{
  /* A decl needs memory if its address is taken, it is global, or it is
     an aggregate return value passed through memory.  */
  return (TREE_ADDRESSABLE (t)
	  || is_global_var (t)
	  || (TREE_CODE (t) == RESULT_DECL
	      && !DECL_BY_REFERENCE (t)
	      && aggregate_value_p (t, current_function_decl)));
}

/* Return value of a constant X and sign-extend it.  */

HOST_WIDE_INT
int_cst_value (const_tree x)
{
  unsigned bits = TYPE_PRECISION (TREE_TYPE (x));
  unsigned HOST_WIDE_INT val = TREE_INT_CST_LOW (x);

  /* Make sure the sign-extended value will fit in a HOST_WIDE_INT.  */
  gcc_assert (cst_and_fits_in_hwi (x));

  if (bits < HOST_BITS_PER_WIDE_INT)
    {
      bool negative = ((val >> (bits - 1)) & 1) != 0;
      /* Set (for negative values) or clear (for non-negative values)
	 every bit above the value's precision.  NOTE(review): the shift
	 is split in two — "<< (bits - 1) << 1" — presumably to keep each
	 shift count strictly below the type width; confirm before
	 simplifying to "<< bits".  */
      if (negative)
	val |= (~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1;
      else
	val &= ~((~(unsigned HOST_WIDE_INT) 0) << (bits - 1) << 1);
    }

  return val;
}

/* If TYPE is an integral or pointer type, return an integer type with
   the same precision which is unsigned iff UNSIGNEDP is true, or itself
   if TYPE is already an integer type of signedness UNSIGNEDP.
*/ 10799 10800tree 10801signed_or_unsigned_type_for (int unsignedp, tree type) 10802{ 10803 if (TREE_CODE (type) == INTEGER_TYPE && TYPE_UNSIGNED (type) == unsignedp) 10804 return type; 10805 10806 if (TREE_CODE (type) == VECTOR_TYPE) 10807 { 10808 tree inner = TREE_TYPE (type); 10809 tree inner2 = signed_or_unsigned_type_for (unsignedp, inner); 10810 if (!inner2) 10811 return NULL_TREE; 10812 if (inner == inner2) 10813 return type; 10814 return build_vector_type (inner2, TYPE_VECTOR_SUBPARTS (type)); 10815 } 10816 10817 if (!INTEGRAL_TYPE_P (type) 10818 && !POINTER_TYPE_P (type) 10819 && TREE_CODE (type) != OFFSET_TYPE) 10820 return NULL_TREE; 10821 10822 return build_nonstandard_integer_type (TYPE_PRECISION (type), unsignedp); 10823} 10824 10825/* If TYPE is an integral or pointer type, return an integer type with 10826 the same precision which is unsigned, or itself if TYPE is already an 10827 unsigned integer type. */ 10828 10829tree 10830unsigned_type_for (tree type) 10831{ 10832 return signed_or_unsigned_type_for (1, type); 10833} 10834 10835/* If TYPE is an integral or pointer type, return an integer type with 10836 the same precision which is signed, or itself if TYPE is already a 10837 signed integer type. */ 10838 10839tree 10840signed_type_for (tree type) 10841{ 10842 return signed_or_unsigned_type_for (0, type); 10843} 10844 10845/* If TYPE is a vector type, return a signed integer vector type with the 10846 same width and number of subparts. Otherwise return boolean_type_node. */ 10847 10848tree 10849truth_type_for (tree type) 10850{ 10851 if (TREE_CODE (type) == VECTOR_TYPE) 10852 { 10853 tree elem = lang_hooks.types.type_for_size 10854 (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))), 0); 10855 return build_opaque_vector_type (elem, TYPE_VECTOR_SUBPARTS (type)); 10856 } 10857 else 10858 return boolean_type_node; 10859} 10860 10861/* Returns the largest value obtainable by casting something in INNER type to 10862 OUTER type. 
*/

tree
upper_bound_in_type (tree outer, tree inner)
{
  unsigned int det = 0;
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);
  unsigned prec;

  /* Compute a unique number for every combination: bit 2 is "outer is
     wider", bit 1 is "outer unsigned", bit 0 is "inner unsigned".  */
  det |= (oprec > iprec) ? 4 : 0;
  det |= TYPE_UNSIGNED (outer) ? 2 : 0;
  det |= TYPE_UNSIGNED (inner) ? 1 : 0;

  /* Determine the exponent to use.  */
  switch (det)
    {
    case 0:
    case 1:
      /* oprec <= iprec, outer: signed, inner: don't care.  */
      prec = oprec - 1;
      break;
    case 2:
    case 3:
      /* oprec <= iprec, outer: unsigned, inner: don't care.  */
      prec = oprec;
      break;
    case 4:
      /* oprec > iprec, outer: signed, inner: signed.  */
      prec = iprec - 1;
      break;
    case 5:
      /* oprec > iprec, outer: signed, inner: unsigned.  */
      prec = iprec;
      break;
    case 6:
      /* oprec > iprec, outer: unsigned, inner: signed.  */
      prec = oprec;
      break;
    case 7:
      /* oprec > iprec, outer: unsigned, inner: unsigned.  */
      prec = iprec;
      break;
    default:
      gcc_unreachable ();
    }

  /* The bound is 2^PREC - 1, i.e. a mask of PREC low bits.  */
  return wide_int_to_tree (outer,
			   wi::mask (prec, false, TYPE_PRECISION (outer)));
}

/* Returns the smallest value obtainable by casting something in INNER type to
   OUTER type.  */

tree
lower_bound_in_type (tree outer, tree inner)
{
  unsigned oprec = TYPE_PRECISION (outer);
  unsigned iprec = TYPE_PRECISION (inner);

  /* If OUTER type is unsigned, we can definitely cast 0 to OUTER type
     and obtain 0.  */
  if (TYPE_UNSIGNED (outer)
      /* If we are widening something of an unsigned type, OUTER type
	 contains all values of INNER type.  In particular, both INNER
	 and OUTER types have zero in common.  */
      || (oprec > iprec && TYPE_UNSIGNED (inner)))
    return build_int_cst (outer, 0);
  else
    {
      /* If we are widening a signed type to another signed type, we
	 want to obtain -2^(iprec-1).  If we are keeping the
	 precision or narrowing to a signed type, we want to obtain
	 -2^(oprec-1).  */
      unsigned prec = oprec > iprec ? iprec : oprec;
      return wide_int_to_tree (outer,
			       wi::mask (prec - 1, true,
					 TYPE_PRECISION (outer)));
    }
}

/* Return nonzero if two operands that are suitable for PHI nodes are
   necessarily equal.  Specifically, both ARG0 and ARG1 must be either
   SSA_NAME or invariant.  Note that this is strictly an optimization.
   That is, callers of this function can directly call operand_equal_p
   and get the same result, only slower.  */

int
operand_equal_for_phi_arg_p (const_tree arg0, const_tree arg1)
{
  if (arg0 == arg1)
    return 1;
  /* Distinct SSA_NAMEs may hold distinct values; only pointer identity
     (handled above) proves equality for them.  */
  if (TREE_CODE (arg0) == SSA_NAME || TREE_CODE (arg1) == SSA_NAME)
    return 0;
  return operand_equal_p (arg0, arg1, 0);
}

/* Returns number of zeros at the end of binary representation of X.  */

tree
num_ending_zeros (const_tree x)
{
  /* wi::ctz counts trailing zero bits of the constant.  */
  return build_int_cst (TREE_TYPE (x), wi::ctz (x));
}


/* Walk NODE via walk_tree_1 and return from the enclosing function if
   the walk produced a result.  */
#define WALK_SUBTREE(NODE)				\
  do							\
    {							\
      result = walk_tree_1 (&(NODE), func, data, pset, lh);	\
      if (result)					\
	return result;					\
    }							\
  while (0)

/* This is a subroutine of walk_tree that walks the fields of TYPE that are to
   be walked whenever a type is seen in the tree.  Rest of operands and return
   value are as for walk_tree.
*/

static tree
walk_type_fields (tree type, walk_tree_fn func, void *data,
		  hash_set<tree> *pset, walk_tree_lh lh)
{
  tree result = NULL_TREE;

  switch (TREE_CODE (type))
    {
    case POINTER_TYPE:
    case REFERENCE_TYPE:
    case VECTOR_TYPE:
      /* We have to worry about mutually recursive pointers.  These can't
	 be written in C.  They can in Ada.  It's pathological, but
	 there's an ACATS test (c38102a) that checks it.  Deal with this
	 by checking if we're pointing to another pointer, that one
	 points to another pointer, that one does too, and we have no htab.
	 If so, get a hash table.  We check three levels deep to avoid
	 the cost of the hash table if we don't need one.  */
      if (POINTER_TYPE_P (TREE_TYPE (type))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (type)))
	  && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (TREE_TYPE (type))))
	  && !pset)
	{
	  /* walk_tree_without_duplicates creates a pset internally, so
	     a pointer cycle cannot loop forever.  */
	  result = walk_tree_without_duplicates (&TREE_TYPE (type),
						 func, data);
	  if (result)
	    return result;

	  break;
	}

      /* ... fall through ...  */

    case COMPLEX_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      break;

    case METHOD_TYPE:
      WALK_SUBTREE (TYPE_METHOD_BASETYPE (type));

      /* Fall through.  */

    case FUNCTION_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      {
	tree arg;

	/* We never want to walk into default arguments.  */
	for (arg = TYPE_ARG_TYPES (type); arg; arg = TREE_CHAIN (arg))
	  WALK_SUBTREE (TREE_VALUE (arg));
      }
      break;

    case ARRAY_TYPE:
      /* Don't follow this node's type if a pointer for fear that
	 we'll have infinite recursion.  If we have a PSET, then we
	 need not fear.
*/
      if (pset
	  || (!POINTER_TYPE_P (TREE_TYPE (type))
	      && TREE_CODE (TREE_TYPE (type)) != OFFSET_TYPE))
	WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_DOMAIN (type));
      break;

    case OFFSET_TYPE:
      WALK_SUBTREE (TREE_TYPE (type));
      WALK_SUBTREE (TYPE_OFFSET_BASETYPE (type));
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.  FUNC is
   called with the DATA and the address of each sub-tree.  If FUNC returns a
   non-NULL value, the traversal is stopped, and the value returned by FUNC
   is returned.  If PSET is non-NULL it is used to record the nodes visited,
   and to avoid visiting a node more than once.  */

tree
walk_tree_1 (tree *tp, walk_tree_fn func, void *data,
	     hash_set<tree> *pset, walk_tree_lh lh)
{
  enum tree_code code;
  int walk_subtrees;
  tree result;

/* Walk NODE by looping back to the top of this function instead of
   recursing, so long chains (e.g. TREE_LIST) don't blow the stack.  */
#define WALK_SUBTREE_TAIL(NODE)				\
  do							\
    {							\
      tp = & (NODE);					\
      goto tail_recurse;				\
    }							\
  while (0)

 tail_recurse:
  /* Skip empty subtrees.  */
  if (!*tp)
    return NULL_TREE;

  /* Don't walk the same tree twice, if the user has requested
     that we avoid doing so.  */
  if (pset && pset->add (*tp))
    return NULL_TREE;

  /* Call the function.  */
  walk_subtrees = 1;
  result = (*func) (tp, &walk_subtrees, data);

  /* If we found something, return it.  */
  if (result)
    return result;

  code = TREE_CODE (*tp);

  /* Even if we didn't, FUNC may have decided that there was nothing
     interesting below this point in the tree.  */
  if (!walk_subtrees)
    {
      /* But we still need to check our siblings.  */
      if (code == TREE_LIST)
	WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      else if (code == OMP_CLAUSE)
	WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
      else
	return NULL_TREE;
    }

  /* Give the language-specific hook a chance to handle or veto the walk.  */
  if (lh)
    {
      result = (*lh) (tp, &walk_subtrees, func, data, pset);
      if (result || !walk_subtrees)
	return result;
    }

  switch (code)
    {
    case ERROR_MARK:
    case IDENTIFIER_NODE:
    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
    case VECTOR_CST:
    case STRING_CST:
    case BLOCK:
    case PLACEHOLDER_EXPR:
    case SSA_NAME:
    case FIELD_DECL:
    case RESULT_DECL:
      /* None of these have subtrees other than those already walked
	 above.  */
      break;

    case TREE_LIST:
      WALK_SUBTREE (TREE_VALUE (*tp));
      WALK_SUBTREE_TAIL (TREE_CHAIN (*tp));
      break;

    case TREE_VEC:
      {
	int len = TREE_VEC_LENGTH (*tp);

	if (len == 0)
	  break;

	/* Walk all elements but the first.  */
	while (--len)
	  WALK_SUBTREE (TREE_VEC_ELT (*tp, len));

	/* Now walk the first one as a tail call.  */
	WALK_SUBTREE_TAIL (TREE_VEC_ELT (*tp, 0));
      }

    case COMPLEX_CST:
      WALK_SUBTREE (TREE_REALPART (*tp));
      WALK_SUBTREE_TAIL (TREE_IMAGPART (*tp));

    case CONSTRUCTOR:
      {
	unsigned HOST_WIDE_INT idx;
	constructor_elt *ce;

	for (idx = 0; vec_safe_iterate (CONSTRUCTOR_ELTS (*tp), idx, &ce);
	     idx++)
	  WALK_SUBTREE (ce->value);
      }
      break;

    case SAVE_EXPR:
      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, 0));

    case BIND_EXPR:
      {
	tree decl;
	for (decl = BIND_EXPR_VARS (*tp); decl; decl = DECL_CHAIN (decl))
	  {
	    /* Walk the DECL_INITIAL and DECL_SIZE.  We don't want to walk
	       into declarations that are just mentioned, rather than
	       declared; they don't really belong to this part of the tree.
	       And, we can see cycles: the initializer for a declaration
	       can refer to the declaration itself.  */
	    WALK_SUBTREE (DECL_INITIAL (decl));
	    WALK_SUBTREE (DECL_SIZE (decl));
	    WALK_SUBTREE (DECL_SIZE_UNIT (decl));
	  }
	WALK_SUBTREE_TAIL (BIND_EXPR_BODY (*tp));
      }

    case STATEMENT_LIST:
      {
	tree_stmt_iterator i;
	for (i = tsi_start (*tp); !tsi_end_p (i); tsi_next (&i))
	  WALK_SUBTREE (*tsi_stmt_ptr (i));
      }
      break;

    case OMP_CLAUSE:
      switch (OMP_CLAUSE_CODE (*tp))
	{
	case OMP_CLAUSE_GANG:
	  /* GANG has a second operand; walk it, then fall through for
	     operand 0 and the clause chain.  */
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  /* FALLTHRU */

	case OMP_CLAUSE_DEVICE_RESIDENT:
	case OMP_CLAUSE_USE_DEVICE:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_UNIFORM:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__SIMDUID_:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 0));
	  /* FALLTHRU */

	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_INBRANCH:
	case OMP_CLAUSE_NOTINBRANCH:
	case OMP_CLAUSE_FOR:
	case OMP_CLAUSE_PARALLEL:
	case OMP_CLAUSE_SECTIONS:
	case OMP_CLAUSE_TASKGROUP:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_LASTPRIVATE:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LASTPRIVATE_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_COLLAPSE:
	  {
	    int i;
	    for (i = 0; i < 3; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	case OMP_CLAUSE_LINEAR:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STEP (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_LINEAR_STMT (*tp));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_MAP:
	case OMP_CLAUSE__CACHE_:
	  WALK_SUBTREE (OMP_CLAUSE_DECL (*tp));
	  WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, 1));
	  WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));

	case OMP_CLAUSE_REDUCTION:
	  {
	    int i;
	    for (i = 0; i < 4; i++)
	      WALK_SUBTREE (OMP_CLAUSE_OPERAND (*tp, i));
	    WALK_SUBTREE_TAIL (OMP_CLAUSE_CHAIN (*tp));
	  }

	default:
	  gcc_unreachable ();
	}
      break;

    case TARGET_EXPR:
      {
	int i, len;

	/* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
	   But, we only want to walk once.  */
	len = (TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1)) ? 2 : 3;
	for (i = 0; i < len; ++i)
	  WALK_SUBTREE (TREE_OPERAND (*tp, i));
	WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len));
      }

    case DECL_EXPR:
      /* If this is a TYPE_DECL, walk into the fields of the type that it's
	 defining.  We only want to walk into these fields of a type in this
	 case and not in the general case of a mere reference to the type.

	 The criterion is as follows: if the field can be an expression, it
	 must be walked only here.  This should be in keeping with the fields
	 that are directly gimplified in gimplify_type_sizes in order for the
	 mark/copy-if-shared/unmark machinery of the gimplifier to work with
	 variable-sized types.

	 Note that DECLs get walked as part of processing the BIND_EXPR.  */
      if (TREE_CODE (DECL_EXPR_DECL (*tp)) == TYPE_DECL)
	{
	  tree *type_p = &TREE_TYPE (DECL_EXPR_DECL (*tp));
	  if (TREE_CODE (*type_p) == ERROR_MARK)
	    return NULL_TREE;

	  /* Call the function for the type.  See if it returns anything or
	     doesn't want us to continue.  If we are to continue, walk both
	     the normal fields and those for the declaration case.  */
	  result = (*func) (type_p, &walk_subtrees, data);
	  if (result || !walk_subtrees)
	    return result;

	  /* But do not walk a pointed-to type since it may itself need to
	     be walked in the declaration case if it isn't anonymous.  */
	  if (!POINTER_TYPE_P (*type_p))
	    {
	      result = walk_type_fields (*type_p, func, data, pset, lh);
	      if (result)
		return result;
	    }

	  /* If this is a record type, also walk the fields.  */
	  if (RECORD_OR_UNION_TYPE_P (*type_p))
	    {
	      tree field;

	      for (field = TYPE_FIELDS (*type_p); field;
		   field = DECL_CHAIN (field))
		{
		  /* We'd like to look at the type of the field, but we can
		     easily get infinite recursion.  So assume it's pointed
		     to elsewhere in the tree.  Also, ignore things that
		     aren't fields.  */
		  if (TREE_CODE (field) != FIELD_DECL)
		    continue;

		  WALK_SUBTREE (DECL_FIELD_OFFSET (field));
		  WALK_SUBTREE (DECL_SIZE (field));
		  WALK_SUBTREE (DECL_SIZE_UNIT (field));
		  if (TREE_CODE (*type_p) == QUAL_UNION_TYPE)
		    WALK_SUBTREE (DECL_QUALIFIER (field));
		}
	    }

	  /* Same for scalar types.  */
	  else if (TREE_CODE (*type_p) == BOOLEAN_TYPE
		   || TREE_CODE (*type_p) == ENUMERAL_TYPE
		   || TREE_CODE (*type_p) == INTEGER_TYPE
		   || TREE_CODE (*type_p) == FIXED_POINT_TYPE
		   || TREE_CODE (*type_p) == REAL_TYPE)
	    {
	      WALK_SUBTREE (TYPE_MIN_VALUE (*type_p));
	      WALK_SUBTREE (TYPE_MAX_VALUE (*type_p));
	    }

	  WALK_SUBTREE (TYPE_SIZE (*type_p));
	  WALK_SUBTREE_TAIL (TYPE_SIZE_UNIT (*type_p));
	}
      /* FALLTHRU */

    default:
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  int i, len;

	  /* Walk over all the sub-trees of this operand.  */
	  len = TREE_OPERAND_LENGTH (*tp);

	  /* Go through the subtrees.  We need to do this in forward order so
	     that the scope of a FOR_EXPR is handled properly.  */
	  if (len)
	    {
	      for (i = 0; i < len - 1; ++i)
		WALK_SUBTREE (TREE_OPERAND (*tp, i));
	      WALK_SUBTREE_TAIL (TREE_OPERAND (*tp, len - 1));
	    }
	}
      /* If this is a type, walk the needed fields in the type.  */
      else if (TYPE_P (*tp))
	return walk_type_fields (*tp, func, data, pset, lh);
      break;
    }

  /* We didn't find what we were looking for.  */
  return NULL_TREE;

#undef WALK_SUBTREE_TAIL
}
#undef WALK_SUBTREE

/* Like walk_tree, but does not walk duplicate nodes more than once.
*/

tree
walk_tree_without_duplicates_1 (tree *tp, walk_tree_fn func, void *data,
				walk_tree_lh lh)
{
  tree result;

  /* A local pset makes walk_tree_1 record visited nodes and skip
     duplicates.  */
  hash_set<tree> pset;
  result = walk_tree_1 (tp, func, data, &pset, lh);
  return result;
}

/* Return the BLOCK stored in the location of expression T.  Aborts if T
   is not of an expression code class.  */

tree
tree_block (tree t)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    return LOCATION_BLOCK (t->exp.locus);
  gcc_unreachable ();
  return NULL;
}

/* Store block B in the location of expression T, or strip any block
   from the location if B is NULL.  Aborts if T is not of an expression
   code class.  */

void
tree_set_block (tree t, tree b)
{
  const enum tree_code_class c = TREE_CODE_CLASS (TREE_CODE (t));

  if (IS_EXPR_CODE_CLASS (c))
    {
      if (b)
	t->exp.locus = COMBINE_LOCATION_DATA (line_table, t->exp.locus, b);
      else
	t->exp.locus = LOCATION_LOCUS (t->exp.locus);
    }
  else
    gcc_unreachable ();
}

/* Create a nameless artificial label and put it in the current
   function context.  The label has a location of LOC.  Returns the
   newly created label.  */

tree
create_artificial_label (location_t loc)
{
  tree lab = build_decl (loc,
			 LABEL_DECL, NULL_TREE, void_type_node);

  DECL_ARTIFICIAL (lab) = 1;
  DECL_IGNORED_P (lab) = 1;
  DECL_CONTEXT (lab) = current_function_decl;
  return lab;
}

/* Given a tree, try to return a useful variable name that we can use
   to prefix a temporary that is being assigned the value of the tree.
   I.E. given  <temp> = &A, return A.  */

const char *
get_name (tree t)
{
  tree stripped_decl;

  stripped_decl = t;
  STRIP_NOPS (stripped_decl);
  if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
    return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
  else if (TREE_CODE (stripped_decl) == SSA_NAME)
    {
      tree name = SSA_NAME_IDENTIFIER (stripped_decl);
      if (!name)
	return NULL;
      return IDENTIFIER_POINTER (name);
    }
  else
    {
      switch (TREE_CODE (stripped_decl))
	{
	case ADDR_EXPR:
	  /* For &A, recurse on A.  */
	  return get_name (TREE_OPERAND (stripped_decl, 0));
	default:
	  return NULL;
	}
    }
}

/* Return true if TYPE has a variable argument list.  */

bool
stdarg_p (const_tree fntype)
{
  function_args_iterator args_iter;
  tree n = NULL_TREE, t;

  if (!fntype)
    return false;

  /* Remember the last argument type; a fixed-argument prototype ends in
     void_type_node, a varargs (or unprototyped) list does not.  */
  FOREACH_FUNCTION_ARGS (fntype, t, args_iter)
    {
      n = t;
    }

  return n != NULL_TREE && n != void_type_node;
}

/* Return true if TYPE has a prototype.  */

bool
prototype_p (tree fntype)
{
  tree t;

  gcc_assert (fntype != NULL_TREE);

  t = TYPE_ARG_TYPES (fntype);
  return (t != NULL_TREE);
}

/* If BLOCK is inlined from an __attribute__((__artificial__))
   routine, return pointer to location from where it has been
   called.
*/
location_t *
block_nonartificial_location (tree block)
{
  location_t *ret = NULL;

  /* Walk outward through the block's supercontexts, tracking the most
     recent call site of an artificial inline.  */
  while (block && TREE_CODE (block) == BLOCK
	 && BLOCK_ABSTRACT_ORIGIN (block))
    {
      tree ao = BLOCK_ABSTRACT_ORIGIN (block);

      /* Follow the abstract-origin chain to its root (guarding against
	 self-referencing origins).  */
      while (TREE_CODE (ao) == BLOCK
	     && BLOCK_ABSTRACT_ORIGIN (ao)
	     && BLOCK_ABSTRACT_ORIGIN (ao) != ao)
	ao = BLOCK_ABSTRACT_ORIGIN (ao);

      if (TREE_CODE (ao) == FUNCTION_DECL)
	{
	  /* If AO is an artificial inline, point RET to the
	     call site locus at which it has been inlined and continue
	     the loop, in case AO's caller is also an artificial
	     inline.  */
	  if (DECL_DECLARED_INLINE_P (ao)
	      && lookup_attribute ("artificial", DECL_ATTRIBUTES (ao)))
	    ret = &BLOCK_SOURCE_LOCATION (block);
	  else
	    break;
	}
      else if (TREE_CODE (ao) != BLOCK)
	break;

      block = BLOCK_SUPERCONTEXT (block);
    }
  return ret;
}


/* If EXP is inlined from an __attribute__((__artificial__))
   function, return the location of the original call expression.  */

location_t
tree_nonartificial_location (tree exp)
{
  location_t *loc = block_nonartificial_location (TREE_BLOCK (exp));

  if (loc)
    return *loc;
  else
    return EXPR_LOCATION (exp);
}


/* These are the hash table functions for the hash table of OPTIMIZATION_NODE
   nodes.  */

/* Return the hash code of X, an OPTIMIZATION_NODE or TARGET_OPTION code.
*/

hashval_t
cl_option_hasher::hash (tree x)
{
  const_tree const t = x;
  const char *p;
  size_t i;
  size_t len = 0;
  hashval_t hash = 0;

  if (TREE_CODE (t) == OPTIMIZATION_NODE)
    {
      /* Hash the raw bytes of the embedded cl_optimization struct.  */
      p = (const char *)TREE_OPTIMIZATION (t);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (t) == TARGET_OPTION_NODE)
    return cl_target_option_hash (TREE_TARGET_OPTION (t));

  else
    gcc_unreachable ();

  /* assume most opt flags are just 0/1, some are 2-3, and a few might be
     something else.  */
  for (i = 0; i < len; i++)
    if (p[i])
      hash = (hash << 4) ^ ((i << 2) | p[i]);

  return hash;
}

/* Return nonzero if the value represented by *X (an OPTIMIZATION or
   TARGET_OPTION tree node) is the same as that given by *Y.  */

bool
cl_option_hasher::equal (tree x, tree y)
{
  const_tree const xt = x;
  const_tree const yt = y;
  const char *xp;
  const char *yp;
  size_t len;

  if (TREE_CODE (xt) != TREE_CODE (yt))
    return 0;

  if (TREE_CODE (xt) == OPTIMIZATION_NODE)
    {
      /* Compare the raw bytes of the embedded option structs.  */
      xp = (const char *)TREE_OPTIMIZATION (xt);
      yp = (const char *)TREE_OPTIMIZATION (yt);
      len = sizeof (struct cl_optimization);
    }

  else if (TREE_CODE (xt) == TARGET_OPTION_NODE)
    {
      return cl_target_option_eq (TREE_TARGET_OPTION (xt),
				  TREE_TARGET_OPTION (yt));
    }

  else
    gcc_unreachable ();

  return (memcmp (xp, yp, len) == 0);
}

/* Build an OPTIMIZATION_NODE based on the options in OPTS.  */

tree
build_optimization_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save OPTS into the scratch node, then look that node up.  */
  cl_optimization_save (TREE_OPTIMIZATION (cl_optimization_node),
			opts);

  tree *slot = cl_option_hash_table->find_slot (cl_optimization_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_optimization_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_optimization_node = make_node (OPTIMIZATION_NODE);
    }

  return t;
}

/* Build a TARGET_OPTION_NODE based on the options in OPTS.  */

tree
build_target_option_node (struct gcc_options *opts)
{
  tree t;

  /* Use the cache of optimization nodes.  */

  /* Save OPTS into the scratch node, then look that node up.  */
  cl_target_option_save (TREE_TARGET_OPTION (cl_target_option_node),
			 opts);

  tree *slot = cl_option_hash_table->find_slot (cl_target_option_node, INSERT);
  t = *slot;
  if (!t)
    {
      /* Insert this one into the hash table.  */
      t = cl_target_option_node;
      *slot = t;

      /* Make a new node for next time round.  */
      cl_target_option_node = make_node (TARGET_OPTION_NODE);
    }

  return t;
}

/* Clear TREE_TARGET_GLOBALS of all TARGET_OPTION_NODE trees,
   so that they aren't saved during PCH writing.  */

void
prepare_target_option_nodes_for_pch (void)
{
  hash_table<cl_option_hasher>::iterator iter = cl_option_hash_table->begin ();
  for (; iter != cl_option_hash_table->end (); ++iter)
    if (TREE_CODE (*iter) == TARGET_OPTION_NODE)
      TREE_TARGET_GLOBALS (*iter) = NULL;
}

/* Determine the "ultimate origin" of a block.  The block may be an inlined
   instance of an inlined instance of a block which is local to an inline
   function, so we have to trace all of the way back through the origin chain
   to find out what sort of node actually served as the original seed for the
   given block.
*/

tree
block_ultimate_origin (const_tree block)
{
  tree immediate_origin = BLOCK_ABSTRACT_ORIGIN (block);

  /* BLOCK_ABSTRACT_ORIGIN can point to itself; ignore that if
     we're trying to output the abstract instance of this function.  */
  if (BLOCK_ABSTRACT (block) && immediate_origin == block)
    return NULL_TREE;

  if (immediate_origin == NULL_TREE)
    return NULL_TREE;
  else
    {
      tree ret_val;
      tree lookahead = immediate_origin;

      /* Chase the chain of BLOCK origins until it ends (non-BLOCK node,
	 NULL, or a self-reference).  */
      do
	{
	  ret_val = lookahead;
	  lookahead = (TREE_CODE (ret_val) == BLOCK
		       ? BLOCK_ABSTRACT_ORIGIN (ret_val) : NULL);
	}
      while (lookahead != NULL && lookahead != ret_val);

      /* The block's abstract origin chain may not be the *ultimate* origin of
	 the block.  It could lead to a DECL that has an abstract origin set.
	 If so, we want that DECL's abstract origin (which is what DECL_ORIGIN
	 will give us if it has one).  Note that DECL's abstract origins are
	 supposed to be the most distant ancestor (or so decl_ultimate_origin
	 claims), so we don't need to loop following the DECL origins.  */
      if (DECL_P (ret_val))
	return DECL_ORIGIN (ret_val);

      return ret_val;
    }
}

/* Return true iff conversion from INNER_TYPE to OUTER_TYPE generates
   no instruction.  */

bool
tree_nop_conversion_p (const_tree outer_type, const_tree inner_type)
{
  /* Use precision rather than machine mode when we can, which gives
     the correct answer even for submode (bit-field) types.  */
  if ((INTEGRAL_TYPE_P (outer_type)
       || POINTER_TYPE_P (outer_type)
       || TREE_CODE (outer_type) == OFFSET_TYPE)
      && (INTEGRAL_TYPE_P (inner_type)
	  || POINTER_TYPE_P (inner_type)
	  || TREE_CODE (inner_type) == OFFSET_TYPE))
    return TYPE_PRECISION (outer_type) == TYPE_PRECISION (inner_type);

  /* Otherwise fall back on comparing machine modes (e.g. for
     aggregate types, floats).  */
  return TYPE_MODE (outer_type) == TYPE_MODE (inner_type);
}

/* Return true iff conversion in EXP generates no instruction.  Mark
   it inline so that we fully inline into the stripping functions even
   though we have two uses of this function.  */

static inline bool
tree_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!CONVERT_EXPR_P (exp)
      && TREE_CODE (exp) != NON_LVALUE_EXPR)
    return false;
  if (TREE_OPERAND (exp, 0) == error_mark_node)
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  if (!inner_type)
    return false;

  return tree_nop_conversion_p (outer_type, inner_type);
}

/* Return true iff conversion in EXP generates no instruction.  Don't
   consider conversions changing the signedness.  */

static bool
tree_sign_nop_conversion (const_tree exp)
{
  tree outer_type, inner_type;

  if (!tree_nop_conversion (exp))
    return false;

  outer_type = TREE_TYPE (exp);
  inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

  /* Also require matching signedness and pointer-ness.  */
  return (TYPE_UNSIGNED (outer_type) == TYPE_UNSIGNED (inner_type)
	  && POINTER_TYPE_P (outer_type) == POINTER_TYPE_P (inner_type));
}

/* Strip conversions from EXP according to tree_nop_conversion and
   return the resulting expression.
*/

tree
tree_strip_nop_conversions (tree exp)
{
  while (tree_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

/* Strip conversions from EXP according to tree_sign_nop_conversion
   and return the resulting expression.  */

tree
tree_strip_sign_nop_conversions (tree exp)
{
  while (tree_sign_nop_conversion (exp))
    exp = TREE_OPERAND (exp, 0);
  return exp;
}

/* Avoid any floating point extensions from EXP.  */
tree
strip_float_extensions (tree exp)
{
  tree sub, expt, subt;

  /* For floating point constant look up the narrowest type that can hold
     it properly and handle it like (type)(narrowest_type)constant.
     This way we can optimize for instance a=a*2.0 where "a" is float
     but 2.0 is double constant.  */
  if (TREE_CODE (exp) == REAL_CST && !DECIMAL_FLOAT_TYPE_P (TREE_TYPE (exp)))
    {
      REAL_VALUE_TYPE orig;
      tree type = NULL;

      orig = TREE_REAL_CST (exp);
      /* Prefer float, then double, when the value survives the
	 truncation exactly.  */
      if (TYPE_PRECISION (TREE_TYPE (exp)) > TYPE_PRECISION (float_type_node)
	  && exact_real_truncate (TYPE_MODE (float_type_node), &orig))
	type = float_type_node;
      else if (TYPE_PRECISION (TREE_TYPE (exp))
	       > TYPE_PRECISION (double_type_node)
	       && exact_real_truncate (TYPE_MODE (double_type_node), &orig))
	type = double_type_node;
      if (type)
	return build_real (type, real_value_truncate (TYPE_MODE (type), orig));
    }

  if (!CONVERT_EXPR_P (exp))
    return exp;

  sub = TREE_OPERAND (exp, 0);
  subt = TREE_TYPE (sub);
  expt = TREE_TYPE (exp);

  if (!FLOAT_TYPE_P (subt))
    return exp;

  /* Never mix decimal and binary floating point this way.  */
  if (DECIMAL_FLOAT_TYPE_P (expt) != DECIMAL_FLOAT_TYPE_P (subt))
    return exp;

  /* Only strip widening conversions.  */
  if (TYPE_PRECISION (subt) > TYPE_PRECISION (expt))
    return exp;

  return strip_float_extensions (sub);
}

/* Strip out all
handled components that produce invariant
   offsets.  */

const_tree
strip_invariant_refs (const_tree op)
{
  while (handled_component_p (op))
    {
      switch (TREE_CODE (op))
	{
	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Require a constant index and no non-default element
	     size/alignment operands.  */
	  if (!is_gimple_constant (TREE_OPERAND (op, 1))
	      || TREE_OPERAND (op, 2) != NULL_TREE
	      || TREE_OPERAND (op, 3) != NULL_TREE)
	    return NULL;
	  break;

	case COMPONENT_REF:
	  /* Require no non-default field offset operand.  */
	  if (TREE_OPERAND (op, 2) != NULL_TREE)
	    return NULL;
	  break;

	default:;
	}
      op = TREE_OPERAND (op, 0);
    }

  return op;
}

static GTY(()) tree gcc_eh_personality_decl;

/* Return the GCC personality function decl.  */

tree
lhd_gcc_personality (void)
{
  /* Built lazily on first use and cached (GTY keeps it across GC).  */
  if (!gcc_eh_personality_decl)
    gcc_eh_personality_decl = build_personality_function ("gcc");
  return gcc_eh_personality_decl;
}

/* TARGET is a call target of GIMPLE call statement
   (obtained by gimple_call_fn).  Return true if it is
   OBJ_TYPE_REF representing a virtual call of C++ method.
   (As opposed to OBJ_TYPE_REF representing objc calls
   through a cast where middle-end devirtualization machinery
   can't apply.)  */

bool
virtual_method_call_p (tree target)
{
  if (TREE_CODE (target) != OBJ_TYPE_REF)
    return false;
  tree t = TREE_TYPE (target);
  gcc_checking_assert (TREE_CODE (t) == POINTER_TYPE);
  t = TREE_TYPE (t);
  /* ObjC-style OBJ_TYPE_REFs wrap plain FUNCTION_TYPEs.  */
  if (TREE_CODE (t) == FUNCTION_TYPE)
    return false;
  gcc_checking_assert (TREE_CODE (t) == METHOD_TYPE);
  /* If we do not have BINFO associated, it means that type was built
     without devirtualization enabled.  Do not consider this a virtual
     call.  */
  if (!TYPE_BINFO (obj_type_ref_class (target)))
    return false;
  return true;
}

/* REF is OBJ_TYPE_REF, return the class the ref corresponds to.
*/ 11969 11970tree 11971obj_type_ref_class (tree ref) 11972{ 11973 gcc_checking_assert (TREE_CODE (ref) == OBJ_TYPE_REF); 11974 ref = TREE_TYPE (ref); 11975 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE); 11976 ref = TREE_TYPE (ref); 11977 /* We look for type THIS points to. ObjC also builds 11978 OBJ_TYPE_REF with non-method calls, Their first parameter 11979 ID however also corresponds to class type. */ 11980 gcc_checking_assert (TREE_CODE (ref) == METHOD_TYPE 11981 || TREE_CODE (ref) == FUNCTION_TYPE); 11982 ref = TREE_VALUE (TYPE_ARG_TYPES (ref)); 11983 gcc_checking_assert (TREE_CODE (ref) == POINTER_TYPE); 11984 return TREE_TYPE (ref); 11985} 11986 11987/* Return true if T is in anonymous namespace. */ 11988 11989bool 11990type_in_anonymous_namespace_p (const_tree t) 11991{ 11992 /* TREE_PUBLIC of TYPE_STUB_DECL may not be properly set for 11993 bulitin types; those have CONTEXT NULL. */ 11994 if (!TYPE_CONTEXT (t)) 11995 return false; 11996 return (TYPE_STUB_DECL (t) && !TREE_PUBLIC (TYPE_STUB_DECL (t))); 11997} 11998 11999/* Lookup sub-BINFO of BINFO of TYPE at offset POS. */ 12000 12001static tree 12002lookup_binfo_at_offset (tree binfo, tree type, HOST_WIDE_INT pos) 12003{ 12004 unsigned int i; 12005 tree base_binfo, b; 12006 12007 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) 12008 if (pos == tree_to_shwi (BINFO_OFFSET (base_binfo)) 12009 && types_same_for_odr (TREE_TYPE (base_binfo), type)) 12010 return base_binfo; 12011 else if ((b = lookup_binfo_at_offset (base_binfo, type, pos)) != NULL) 12012 return b; 12013 return NULL; 12014} 12015 12016/* Try to find a base info of BINFO that would have its field decl at offset 12017 OFFSET within the BINFO type and which is of EXPECTED_TYPE. If it can be 12018 found, return, otherwise return NULL_TREE. 
   */

tree
get_binfo_at_offset (tree binfo, HOST_WIDE_INT offset, tree expected_type)
{
  tree type = BINFO_TYPE (binfo);

  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;
      int i;

      /* Arrived at the expected type: BINFO is the answer.  */
      if (types_same_for_odr (type, expected_type))
	return binfo;
      if (offset < 0)
	return NULL_TREE;

      /* Find the artificial field (a base subobject or vtable pointer)
	 whose bit range contains OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL || !DECL_ARTIFICIAL (fld))
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_uhwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      /* No containing base field, or it is not a record: give up.  */
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      /* Offset 0 indicates the primary base, whose vtable contents are
	 represented in the binfo for the derived class.  */
      else if (offset != 0)
	{
	  tree found_binfo = NULL, base_binfo;
	  /* Offsets in BINFO are in bytes relative to the whole structure
	     while POS is in bits relative to the containing field.
	     (Note: division binds tighter than addition, so only POS is
	     scaled from bits to bytes here.)  */
	  int binfo_offset = (tree_to_shwi (BINFO_OFFSET (binfo)) + pos
			      / BITS_PER_UNIT);

	  /* First look for a matching direct base ...  */
	  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    if (tree_to_shwi (BINFO_OFFSET (base_binfo)) == binfo_offset
		&& types_same_for_odr (TREE_TYPE (base_binfo), TREE_TYPE (fld)))
	      {
		found_binfo = base_binfo;
		break;
	      }
	  if (found_binfo)
	    binfo = found_binfo;
	  else
	    /* ... otherwise search recursively among indirect bases.  */
	    binfo = lookup_binfo_at_offset (binfo, TREE_TYPE (fld),
					    binfo_offset);
	}

      /* Descend into the field's type and retry with the remaining
	 field-relative offset.  */
      type = TREE_TYPE (fld);
      offset -= pos;
    }
}

/* Returns true if X is a typedef decl.  */

bool
is_typedef_decl (tree x)
{
  /* A typedef's TYPE_DECL records the type it renames in
     DECL_ORIGINAL_TYPE; an ordinary TYPE_DECL has it NULL.  */
  return (x && TREE_CODE (x) == TYPE_DECL
	  && DECL_ORIGINAL_TYPE (x) != NULL_TREE);
}

/* Returns true iff TYPE is a type variant created for a typedef.
   */

bool
typedef_variant_p (tree type)
{
  return is_typedef_decl (TYPE_NAME (type));
}

/* Warn about a use of an identifier which was marked deprecated.
   NODE is the deprecated decl or type; ATTR, if non-NULL, is the
   attribute list to look the "deprecated" attribute up in (otherwise
   it is taken from NODE itself).  */
void
warn_deprecated_use (tree node, tree attr)
{
  const char *msg;

  if (node == 0 || !warn_deprecated_decl)
    return;

  /* If the caller did not supply an attribute list, find one on NODE.  */
  if (!attr)
    {
      if (DECL_P (node))
	attr = DECL_ATTRIBUTES (node);
      else if (TYPE_P (node))
	{
	  tree decl = TYPE_STUB_DECL (node);
	  if (decl)
	    attr = lookup_attribute ("deprecated",
				     TYPE_ATTRIBUTES (TREE_TYPE (decl)));
	}
    }

  if (attr)
    attr = lookup_attribute ("deprecated", attr);

  /* The attribute's argument, if any, is the custom message.  */
  if (attr)
    msg = TREE_STRING_POINTER (TREE_VALUE (TREE_VALUE (attr)));
  else
    msg = NULL;

  bool w;
  if (DECL_P (node))
    {
      if (msg)
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated: %s", node, msg);
      else
	w = warning (OPT_Wdeprecated_declarations,
		     "%qD is deprecated", node);
      if (w)
	inform (DECL_SOURCE_LOCATION (node), "declared here");
    }
  else if (TYPE_P (node))
    {
      tree what = NULL_TREE;
      tree decl = TYPE_STUB_DECL (node);

      /* Find a name to print for the type, if it has one.  */
      if (TYPE_NAME (node))
	{
	  if (TREE_CODE (TYPE_NAME (node)) == IDENTIFIER_NODE)
	    what = TYPE_NAME (node);
	  else if (TREE_CODE (TYPE_NAME (node)) == TYPE_DECL
		   && DECL_NAME (TYPE_NAME (node)))
	    what = DECL_NAME (TYPE_NAME (node));
	}

      /* With a stub decl we can also point at the declaration site.  */
      if (decl)
	{
	  if (what)
	    {
	      if (msg)
		w = warning (OPT_Wdeprecated_declarations,
			     "%qE is deprecated: %s", what, msg);
	      else
		w = warning (OPT_Wdeprecated_declarations,
			     "%qE is deprecated", what);
	    }
	  else
	    {
	      if (msg)
		w = warning (OPT_Wdeprecated_declarations,
			     "type is deprecated: %s", msg);
	      else
		w = warning (OPT_Wdeprecated_declarations,
			     "type is deprecated");
	    }
	  if (w)
	    inform (DECL_SOURCE_LOCATION (decl), "declared here");
	}
      else
	{
	  if (what)
	    {
	      if (msg)
		warning (OPT_Wdeprecated_declarations, "%qE is deprecated: %s",
			 what, msg);
	      else
		warning (OPT_Wdeprecated_declarations, "%qE is deprecated", what);
	    }
	  else
	    {
	      if (msg)
		warning (OPT_Wdeprecated_declarations, "type is deprecated: %s",
			 msg);
	      else
		warning (OPT_Wdeprecated_declarations, "type is deprecated");
	    }
	}
    }
}

/* Return true if REF has a COMPONENT_REF with a bit-field field declaration
   somewhere in it.  */

bool
contains_bitfld_component_ref_p (const_tree ref)
{
  /* Walk the chain of handled components toward the base object.  */
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (ref, 1)))
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}

/* Try to determine whether a TRY_CATCH expression can fall through.
   This is a subroutine of block_may_fallthru.  */

static bool
try_catch_may_fallthru (const_tree stmt)
{
  tree_stmt_iterator i;

  /* If the TRY block can fall through, the whole TRY_CATCH can
     fall through.  */
  if (block_may_fallthru (TREE_OPERAND (stmt, 0)))
    return true;

  /* Otherwise, dispatch on the form of the handler sequence.  */
  i = tsi_start (TREE_OPERAND (stmt, 1));
  switch (TREE_CODE (tsi_stmt (i)))
    {
    case CATCH_EXPR:
      /* We expect to see a sequence of CATCH_EXPR trees, each with a
	 catch expression and a body.  The whole TRY_CATCH may fall
	 through iff any of the catch bodies falls through.  */
      for (; !tsi_end_p (i); tsi_next (&i))
	{
	  if (block_may_fallthru (CATCH_BODY (tsi_stmt (i))))
	    return true;
	}
      return false;

    case EH_FILTER_EXPR:
      /* The exception filter expression only matters if there is an
	 exception.  If the exception does not match EH_FILTER_TYPES,
	 we will execute EH_FILTER_FAILURE, and we will fall through
	 if that falls through.  If the exception does match
	 EH_FILTER_TYPES, the stack unwinder will continue up the
	 stack, so we will not fall through.  We don't know whether we
	 will throw an exception which matches EH_FILTER_TYPES or not,
	 so we just ignore EH_FILTER_TYPES and assume that we might
	 throw an exception which doesn't match.  */
      return block_may_fallthru (EH_FILTER_FAILURE (tsi_stmt (i)));

    default:
      /* This case represents statements to be executed when an
	 exception occurs.  Those statements are implicitly followed
	 by a RESX statement to resume execution after the exception.
	 So in this case the TRY_CATCH never falls through.  */
      return false;
    }
}

/* Try to determine if we can fall out of the bottom of BLOCK.  This guess
   need not be 100% accurate; simply be conservative and return true if we
   don't know.  This is used only to avoid stupidly generating extra code.
   If we're wrong, we'll just delete the extra code later.  */

bool
block_may_fallthru (const_tree block)
{
  /* This CONST_CAST is okay because expr_last returns its argument
     unmodified and we assign it to a const_tree.  */
  const_tree stmt = expr_last (CONST_CAST_TREE (block));

  /* An empty block conservatively falls through (ERROR_MARK case).  */
  switch (stmt ? TREE_CODE (stmt) : ERROR_MARK)
    {
    case GOTO_EXPR:
    case RETURN_EXPR:
      /* Easy cases.  If the last statement of the block implies
	 control transfer, then we can't fall through.  */
      return false;

    case SWITCH_EXPR:
      /* If SWITCH_LABELS is set, this is lowered, and represents a
	 branch to a selected label and hence can not fall through.
	 Otherwise SWITCH_BODY is set, and the switch can fall
	 through.  */
      return SWITCH_LABELS (stmt) == NULL_TREE;

    case COND_EXPR:
      /* A conditional falls through if either arm does.  */
      if (block_may_fallthru (COND_EXPR_THEN (stmt)))
	return true;
      return block_may_fallthru (COND_EXPR_ELSE (stmt));

    case BIND_EXPR:
      return block_may_fallthru (BIND_EXPR_BODY (stmt));

    case TRY_CATCH_EXPR:
      return try_catch_may_fallthru (stmt);

    case TRY_FINALLY_EXPR:
      /* The finally clause is always executed after the try clause,
	 so if it does not fall through, then the try-finally will not
	 fall through.  Otherwise, if the try clause does not fall
	 through, then when the finally clause falls through it will
	 resume execution wherever the try clause was going.  So the
	 whole try-finally will only fall through if both the try
	 clause and the finally clause fall through.  */
      return (block_may_fallthru (TREE_OPERAND (stmt, 0))
	      && block_may_fallthru (TREE_OPERAND (stmt, 1)));

    case MODIFY_EXPR:
      /* An assignment whose RHS is a call is treated as the call.  */
      if (TREE_CODE (TREE_OPERAND (stmt, 1)) == CALL_EXPR)
	stmt = TREE_OPERAND (stmt, 1);
      else
	return true;
      /* FALLTHRU */

    case CALL_EXPR:
      /* Functions that do not return do not fall through.  */
      return (call_expr_flags (stmt) & ECF_NORETURN) == 0;

    case CLEANUP_POINT_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 0));

    case TARGET_EXPR:
      return block_may_fallthru (TREE_OPERAND (stmt, 1));

    case ERROR_MARK:
      return true;

    default:
      /* Let the front end decide for language-specific codes.  */
      return lang_hooks.block_may_fallthru (stmt);
    }
}

/* True if we are using EH to handle cleanups.
*/ 12336static bool using_eh_for_cleanups_flag = false; 12337 12338/* This routine is called from front ends to indicate eh should be used for 12339 cleanups. */ 12340void 12341using_eh_for_cleanups (void) 12342{ 12343 using_eh_for_cleanups_flag = true; 12344} 12345 12346/* Query whether EH is used for cleanups. */ 12347bool 12348using_eh_for_cleanups_p (void) 12349{ 12350 return using_eh_for_cleanups_flag; 12351} 12352 12353/* Wrapper for tree_code_name to ensure that tree code is valid */ 12354const char * 12355get_tree_code_name (enum tree_code code) 12356{ 12357 const char *invalid = "<invalid tree code>"; 12358 12359 if (code >= MAX_TREE_CODES) 12360 return invalid; 12361 12362 return tree_code_name[code]; 12363} 12364 12365/* Drops the TREE_OVERFLOW flag from T. */ 12366 12367tree 12368drop_tree_overflow (tree t) 12369{ 12370 gcc_checking_assert (TREE_OVERFLOW (t)); 12371 12372 /* For tree codes with a sharing machinery re-build the result. */ 12373 if (TREE_CODE (t) == INTEGER_CST) 12374 return wide_int_to_tree (TREE_TYPE (t), t); 12375 12376 /* Otherwise, as all tcc_constants are possibly shared, copy the node 12377 and drop the flag. */ 12378 t = copy_node (t); 12379 TREE_OVERFLOW (t) = 0; 12380 return t; 12381} 12382 12383/* Given a memory reference expression T, return its base address. 12384 The base address of a memory reference expression is the main 12385 object being referenced. For instance, the base address for 12386 'array[i].fld[j]' is 'array'. You can think of this as stripping 12387 away the offset part from a memory address. 12388 12389 This function calls handled_component_p to strip away all the inner 12390 parts of the memory reference until it reaches the base object. 
*/ 12391 12392tree 12393get_base_address (tree t) 12394{ 12395 while (handled_component_p (t)) 12396 t = TREE_OPERAND (t, 0); 12397 12398 if ((TREE_CODE (t) == MEM_REF 12399 || TREE_CODE (t) == TARGET_MEM_REF) 12400 && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR) 12401 t = TREE_OPERAND (TREE_OPERAND (t, 0), 0); 12402 12403 /* ??? Either the alias oracle or all callers need to properly deal 12404 with WITH_SIZE_EXPRs before we can look through those. */ 12405 if (TREE_CODE (t) == WITH_SIZE_EXPR) 12406 return NULL_TREE; 12407 12408 return t; 12409} 12410 12411/* Return the machine mode of T. For vectors, returns the mode of the 12412 inner type. The main use case is to feed the result to HONOR_NANS, 12413 avoiding the BLKmode that a direct TYPE_MODE (T) might return. */ 12414 12415machine_mode 12416element_mode (const_tree t) 12417{ 12418 if (!TYPE_P (t)) 12419 t = TREE_TYPE (t); 12420 if (VECTOR_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE) 12421 t = TREE_TYPE (t); 12422 return TYPE_MODE (t); 12423} 12424 12425#include "gt-tree.h" 12426