1/* Implements exception handling.
2   Copyright (C) 1989-2015 Free Software Foundation, Inc.
3   Contributed by Mike Stump <mrs@cygnus.com>.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 3, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3.  If not see
19<http://www.gnu.org/licenses/>.  */
20
21
22/* An exception is an event that can be "thrown" from within a
23   function.  This event can then be "caught" by the callers of
24   the function.
25
26   The representation of exceptions changes several times during
27   the compilation process:
28
29   In the beginning, in the front end, we have the GENERIC trees
30   TRY_CATCH_EXPR, TRY_FINALLY_EXPR, WITH_CLEANUP_EXPR,
31   CLEANUP_POINT_EXPR, CATCH_EXPR, and EH_FILTER_EXPR.
32
33   During initial gimplification (gimplify.c) these are lowered
34   to the GIMPLE_TRY, GIMPLE_CATCH, and GIMPLE_EH_FILTER nodes.
35   The WITH_CLEANUP_EXPR and CLEANUP_POINT_EXPR nodes are converted
36   into GIMPLE_TRY_FINALLY nodes; the others are a more direct 1-1
37   conversion.
38
39   During pass_lower_eh (tree-eh.c) we record the nested structure
40   of the TRY nodes in EH_REGION nodes in CFUN->EH->REGION_TREE.
41   We expand the eh_protect_cleanup_actions langhook into MUST_NOT_THROW
42   regions at this time.  We can then flatten the statements within
43   the TRY nodes to straight-line code.  Statements that had been within
44   TRY nodes that can throw are recorded within CFUN->EH->THROW_STMT_TABLE,
45   so that we may remember what action is supposed to be taken if
46   a given statement does throw.  During this lowering process,
47   we create an EH_LANDING_PAD node for each EH_REGION that has
48   some code within the function that needs to be executed if a
49   throw does happen.  We also create RESX statements that are
50   used to transfer control from an inner EH_REGION to an outer
51   EH_REGION.  We also create EH_DISPATCH statements as placeholders
52   for a runtime type comparison that should be made in order to
53   select the action to perform among different CATCH and EH_FILTER
54   regions.
55
56   During pass_lower_eh_dispatch (tree-eh.c), which is run after
57   all inlining is complete, we are able to run assign_filter_values,
58   which allows us to map the set of types manipulated by all of the
59   CATCH and EH_FILTER regions to a set of integers.  This set of integers
60   will be how the exception runtime communicates with the code generated
61   within the function.  We then expand the GIMPLE_EH_DISPATCH statements
62   to a switch or conditional branches that use the argument provided by
63   the runtime (__builtin_eh_filter) and the set of integers we computed
64   in assign_filter_values.
65
66   During pass_lower_resx (tree-eh.c), which is run near the end
67   of optimization, we expand RESX statements.  If the eh region
68   that is outer to the RESX statement is a MUST_NOT_THROW, then
69   the RESX expands to some form of abort statement.  If the eh
70   region that is outer to the RESX statement is within the current
71   function, then the RESX expands to a bookkeeping call
72   (__builtin_eh_copy_values) and a goto.  Otherwise, the next
73   handler for the exception must be within a function somewhere
74   up the call chain, so we call back into the exception runtime
75   (__builtin_unwind_resume).
76
77   During pass_expand (cfgexpand.c), we generate REG_EH_REGION notes
78   that create an rtl to eh_region mapping that corresponds to the
79   gimple to eh_region mapping that had been recorded in the
80   THROW_STMT_TABLE.
81
82   Then, via finish_eh_generation, we generate the real landing pads
83   to which the runtime will actually transfer control.  These new
84   landing pads perform whatever bookkeeping is needed by the target
85   backend in order to resume execution within the current function.
86   Each of these new landing pads falls through into the post_landing_pad
87   label which had been used within the CFG up to this point.  All
88   exception edges within the CFG are redirected to the new landing pads.
89   If the target uses setjmp to implement exceptions, the various extra
90   calls into the runtime to register and unregister the current stack
91   frame are emitted at this time.
92
93   During pass_convert_to_eh_region_ranges (except.c), we transform
94   the REG_EH_REGION notes attached to individual insns into
95   non-overlapping ranges of insns bounded by NOTE_INSN_EH_REGION_BEG
96   and NOTE_INSN_EH_REGION_END.  Each insn within such ranges has the
97   same associated action within the exception region tree, meaning
98   that (1) the exception is caught by the same landing pad within the
99   current function, (2) the exception is blocked by the runtime with
100   a MUST_NOT_THROW region, or (3) the exception is not handled at all
101   within the current function.
102
103   Finally, during assembly generation, we call
104   output_function_exception_table (except.c) to emit the tables with
105   which the exception runtime can determine if a given stack frame
106   handles a given exception, and if so what filter value to provide
107   to the function when the non-local control transfer is effected.
108   If the target uses dwarf2 unwinding to implement exceptions, then
109   output_call_frame_info (dwarf2out.c) emits the required unwind data.  */
110
111
112#include "config.h"
113#include "system.h"
114#include "coretypes.h"
115#include "tm.h"
116#include "rtl.h"
117#include "hash-set.h"
118#include "machmode.h"
119#include "vec.h"
120#include "double-int.h"
121#include "input.h"
122#include "alias.h"
123#include "symtab.h"
124#include "wide-int.h"
125#include "inchash.h"
126#include "real.h"
127#include "tree.h"
128#include "fold-const.h"
129#include "stringpool.h"
130#include "stor-layout.h"
131#include "flags.h"
132#include "hard-reg-set.h"
133#include "function.h"
134#include "insn-codes.h"
135#include "optabs.h"
136#include "hashtab.h"
137#include "statistics.h"
138#include "fixed-value.h"
139#include "insn-config.h"
140#include "expmed.h"
141#include "dojump.h"
142#include "explow.h"
143#include "calls.h"
144#include "emit-rtl.h"
145#include "varasm.h"
146#include "stmt.h"
147#include "expr.h"
148#include "libfuncs.h"
149#include "except.h"
150#include "output.h"
151#include "dwarf2asm.h"
152#include "dwarf2out.h"
153#include "dwarf2.h"
154#include "toplev.h"
155#include "hash-table.h"
156#include "intl.h"
157#include "tm_p.h"
158#include "target.h"
159#include "common/common-target.h"
160#include "langhooks.h"
161#include "predict.h"
162#include "dominance.h"
163#include "cfg.h"
164#include "cfgrtl.h"
165#include "basic-block.h"
166#include "hash-map.h"
167#include "is-a.h"
168#include "plugin-api.h"
169#include "ipa-ref.h"
170#include "cgraph.h"
171#include "diagnostic.h"
172#include "tree-pretty-print.h"
173#include "tree-pass.h"
174#include "cfgloop.h"
175#include "builtins.h"
176
177/* Provide defaults for stuff that may not be defined when using
178   sjlj exceptions.  */
179#ifndef EH_RETURN_DATA_REGNO
180#define EH_RETURN_DATA_REGNO(N) INVALID_REGNUM
181#endif
182
183static GTY(()) int call_site_base;
184
/* Hash-map traits for tree-keyed maps: hash a tree node with TREE_HASH;
   everything else (equality, empty/deleted markers) is inherited from
   default_hashmap_traits.  */
struct tree_hash_traits : default_hashmap_traits
{
  static hashval_t hash (tree t) { return TREE_HASH (t); }
};
189
190static GTY (()) hash_map<tree, tree, tree_hash_traits> *type_to_runtime_map;
191
192/* Describe the SjLj_Function_Context structure.  */
193static GTY(()) tree sjlj_fc_type_node;
194static int sjlj_fc_call_site_ofs;
195static int sjlj_fc_data_ofs;
196static int sjlj_fc_personality_ofs;
197static int sjlj_fc_lsda_ofs;
198static int sjlj_fc_jbuf_ofs;
199
200
/* One entry of the call-site table emitted for the unwinder: the
   landing pad label for a range of call sites and the associated
   action table index (see the encoding described below).  */
struct GTY(()) call_site_record_d
{
  rtx landing_pad;	/* Label of the landing pad, or NULL.  */
  int action;		/* Action table index; see the special cases below.  */
};
206
207/* In the following structure and associated functions,
208   we represent entries in the action table as 1-based indices.
209   Special cases are:
210
211	 0:	null action record, non-null landing pad; implies cleanups
212	-1:	null action record, null landing pad; implies no action
213	-2:	no call-site entry; implies must_not_throw
214	-3:	we have yet to process outer regions
215
216   Further, no special cases apply to the "next" field of the record.
217   For next, 0 means end of list.  */
218
struct action_record
{
  int offset;	/* Byte offset of this record (presumably within the
		   emitted action table; the emitting code is not
		   visible in this chunk -- confirm there).  */
  int filter;	/* Filter value for this action.  */
  int next;	/* 1-based index of the next record in the chain;
		   0 means end of list (see the comment above).  */
};
225
226/* Hashtable helpers.  */
227
/* Hash table helpers for action_record entries; records are malloc'd,
   hence typed_free_remove.  */
struct action_record_hasher : typed_free_remove <action_record>
{
  typedef action_record value_type;
  typedef action_record compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
235
/* Hash an action record by combining its chain link and filter value;
   OFFSET does not participate in the identity.  */

inline hashval_t
action_record_hasher::hash (const value_type *entry)
{
  return entry->next * 1009 + entry->filter;
}
241
242inline bool
243action_record_hasher::equal (const value_type *entry, const compare_type *data)
244{
245  return entry->filter == data->filter && entry->next == data->next;
246}
247
248typedef hash_table<action_record_hasher> action_hash_type;
249
250static bool get_eh_region_and_lp_from_rtx (const_rtx, eh_region *,
251					   eh_landing_pad *);
252
253static void dw2_build_landing_pads (void);
254
255static int collect_one_action_chain (action_hash_type *, eh_region);
256static int add_call_site (rtx, int, int);
257
258static void push_uleb128 (vec<uchar, va_gc> **, unsigned int);
259static void push_sleb128 (vec<uchar, va_gc> **, int);
260#ifndef HAVE_AS_LEB128
261static int dw2_size_of_call_site_table (int);
262static int sjlj_size_of_call_site_table (void);
263#endif
264static void dw2_output_call_site_table (int, int);
265static void sjlj_output_call_site_table (void);
266
267
/* One-time initialization for exception handling.  Does nothing unless
   -fexceptions is in effect.  Allocates the GC-rooted map from source
   types to runtime type objects and, when the target unwinds via
   setjmp/longjmp, builds the SjLj_Function_Context RECORD_TYPE and
   caches the byte offsets of its fields for later rtl generation.  */

void
init_eh (void)
{
  if (! flag_exceptions)
    return;

  type_to_runtime_map
    = hash_map<tree, tree, tree_hash_traits>::create_ggc (31);

  /* Create the SjLj_Function_Context structure.  This should match
     the definition in unwind-sjlj.c.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    {
      tree f_jbuf, f_per, f_lsda, f_prev, f_cs, f_data, tmp;

      sjlj_fc_type_node = lang_hooks.types.make_type (RECORD_TYPE);

      /* struct SjLj_Function_Context *__prev;  */
      f_prev = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__prev"),
			   build_pointer_type (sjlj_fc_type_node));
      DECL_FIELD_CONTEXT (f_prev) = sjlj_fc_type_node;

      /* int __call_site;  */
      f_cs = build_decl (BUILTINS_LOCATION,
			 FIELD_DECL, get_identifier ("__call_site"),
			 integer_type_node);
      DECL_FIELD_CONTEXT (f_cs) = sjlj_fc_type_node;

      /* A four-element array of the target's unwind word type:
	 unwind_word __data[4];  */
      tmp = build_index_type (size_int (4 - 1));
      tmp = build_array_type (lang_hooks.types.type_for_mode
				(targetm.unwind_word_mode (), 1),
			      tmp);
      f_data = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__data"), tmp);
      DECL_FIELD_CONTEXT (f_data) = sjlj_fc_type_node;

      /* void *__personality;  */
      f_per = build_decl (BUILTINS_LOCATION,
			  FIELD_DECL, get_identifier ("__personality"),
			  ptr_type_node);
      DECL_FIELD_CONTEXT (f_per) = sjlj_fc_type_node;

      /* void *__lsda;  */
      f_lsda = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__lsda"),
			   ptr_type_node);
      DECL_FIELD_CONTEXT (f_lsda) = sjlj_fc_type_node;

      /* Choose the number of pointer-sized slots in the __jbuf field.  */
#ifdef DONT_USE_BUILTIN_SETJMP
#ifdef JMP_BUF_SIZE
      tmp = size_int (JMP_BUF_SIZE - 1);
#else
      /* Should be large enough for most systems, if it is not,
	 JMP_BUF_SIZE should be defined with the proper value.  It will
	 also tend to be larger than necessary for most systems, a more
	 optimal port will define JMP_BUF_SIZE.  */
      tmp = size_int (FIRST_PSEUDO_REGISTER + 2 - 1);
#endif
#else
      /* Compute a minimally sized jump buffer.  We need room to store at
	 least 3 pointers - stack pointer, frame pointer and return address.
	 Plus for some targets we need room for an extra pointer - in the
	 case of MIPS this is the global pointer.  This makes a total of four
	 pointers, but to be safe we actually allocate room for 5.

	 If pointers are smaller than words then we allocate enough room for
	 5 words, just in case the backend needs this much room.  For more
	 discussion on this issue see:
	 http://gcc.gnu.org/ml/gcc-patches/2014-05/msg00313.html.  */
      if (POINTER_SIZE > BITS_PER_WORD)
	tmp = size_int (5 - 1);
      else
	tmp = size_int ((5 * BITS_PER_WORD / POINTER_SIZE) - 1);
#endif

      tmp = build_index_type (tmp);
      tmp = build_array_type (ptr_type_node, tmp);
      f_jbuf = build_decl (BUILTINS_LOCATION,
			   FIELD_DECL, get_identifier ("__jbuf"), tmp);
#ifdef DONT_USE_BUILTIN_SETJMP
      /* We don't know what the alignment requirements of the
	 runtime's jmp_buf are.  Overestimate.  */
      DECL_ALIGN (f_jbuf) = BIGGEST_ALIGNMENT;
      DECL_USER_ALIGN (f_jbuf) = 1;
#endif
      DECL_FIELD_CONTEXT (f_jbuf) = sjlj_fc_type_node;

      /* Chain the fields together and lay out the record.  */
      TYPE_FIELDS (sjlj_fc_type_node) = f_prev;
      TREE_CHAIN (f_prev) = f_cs;
      TREE_CHAIN (f_cs) = f_data;
      TREE_CHAIN (f_data) = f_per;
      TREE_CHAIN (f_per) = f_lsda;
      TREE_CHAIN (f_lsda) = f_jbuf;

      layout_type (sjlj_fc_type_node);

      /* Cache the interesting field offsets so that we have
	 easy access from rtl.  */
      sjlj_fc_call_site_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_cs))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_cs)) / BITS_PER_UNIT);
      sjlj_fc_data_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_data))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_data)) / BITS_PER_UNIT);
      sjlj_fc_personality_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_per))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_per)) / BITS_PER_UNIT);
      sjlj_fc_lsda_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_lsda))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_lsda)) / BITS_PER_UNIT);
      sjlj_fc_jbuf_ofs
	= (tree_to_uhwi (DECL_FIELD_OFFSET (f_jbuf))
	   + tree_to_uhwi (DECL_FIELD_BIT_OFFSET (f_jbuf)) / BITS_PER_UNIT);
    }
}
380
/* Per-function EH initialization: allocate CFUN's eh_status and reserve
   the zero'th entries of the region and landing-pad arrays, so that
   index 0 is never a valid region or landing pad (see
   get_eh_region_from_lp_number_fn).  */

void
init_eh_for_function (void)
{
  cfun->eh = ggc_cleared_alloc<eh_status> ();

  /* Make sure zero'th entries are used.  */
  vec_safe_push (cfun->eh->region_array, (eh_region)0);
  vec_safe_push (cfun->eh->lp_array, (eh_landing_pad)0);
}
390
391/* Routines to generate the exception tree somewhat directly.
392   These are used from tree-eh.c when processing exception related
393   nodes during tree optimization.  */
394
/* Allocate a new, empty EH region of kind TYPE and insert it into the
   region tree as the first child of OUTER, or as a new root peer when
   OUTER is NULL.  The region receives the next index in CFUN's region
   array.  Returns the new region.  */

static eh_region
gen_eh_region (enum eh_region_type type, eh_region outer)
{
  eh_region new_eh;

  /* Insert a new blank region as a leaf in the tree.  */
  new_eh = ggc_cleared_alloc<eh_region_d> ();
  new_eh->type = type;
  new_eh->outer = outer;
  if (outer)
    {
      /* Link at the head of OUTER's child list.  */
      new_eh->next_peer = outer->inner;
      outer->inner = new_eh;
    }
  else
    {
      /* No parent: link at the head of the root peer list.  */
      new_eh->next_peer = cfun->eh->region_tree;
      cfun->eh->region_tree = new_eh;
    }

  new_eh->index = vec_safe_length (cfun->eh->region_array);
  vec_safe_push (cfun->eh->region_array, new_eh);

  /* Copy the language's notion of whether to use __cxa_end_cleanup.  */
  if (targetm.arm_eabi_unwinder && lang_hooks.eh_use_cxa_end_cleanup)
    new_eh->use_cxa_end_cleanup = true;

  return new_eh;
}
424
/* Create a new ERT_CLEANUP region as a child of OUTER.  */

eh_region
gen_eh_region_cleanup (eh_region outer)
{
  return gen_eh_region (ERT_CLEANUP, outer);
}
430
/* Create a new ERT_TRY region as a child of OUTER.  */

eh_region
gen_eh_region_try (eh_region outer)
{
  return gen_eh_region (ERT_TRY, outer);
}
436
/* Add a new catch handler to TRY region T.  TYPE_OR_LIST is a single
   type, a TREE_LIST of types, or NULL (in which case no types are
   recorded).  Every type is registered in the runtime type map, and
   the new handler is appended at the end of T's catch chain.  Returns
   the new eh_catch node.  */

eh_catch
gen_eh_region_catch (eh_region t, tree type_or_list)
{
  eh_catch c, l;
  tree type_list, type_node;

  gcc_assert (t->type == ERT_TRY);

  /* Ensure to always end up with a type list to normalize further
     processing, then register each type against the runtime types map.  */
  type_list = type_or_list;
  if (type_or_list)
    {
      if (TREE_CODE (type_or_list) != TREE_LIST)
	type_list = tree_cons (NULL_TREE, type_or_list, NULL_TREE);

      type_node = type_list;
      for (; type_node; type_node = TREE_CHAIN (type_node))
	add_type_for_runtime (TREE_VALUE (type_node));
    }

  /* Append the handler to the doubly-linked catch list of T.  */
  c = ggc_cleared_alloc<eh_catch_d> ();
  c->type_list = type_list;
  l = t->u.eh_try.last_catch;
  c->prev_catch = l;
  if (l)
    l->next_catch = c;
  else
    t->u.eh_try.first_catch = c;
  t->u.eh_try.last_catch = c;

  return c;
}
470
/* Create a new ERT_ALLOWED_EXCEPTIONS region (an exception
   specification) as a child of OUTER.  ALLOWED is the TREE_LIST of
   permitted types; each one is registered in the runtime type map.  */

eh_region
gen_eh_region_allowed (eh_region outer, tree allowed)
{
  eh_region region = gen_eh_region (ERT_ALLOWED_EXCEPTIONS, outer);
  region->u.allowed.type_list = allowed;

  for (; allowed ; allowed = TREE_CHAIN (allowed))
    add_type_for_runtime (TREE_VALUE (allowed));

  return region;
}
482
/* Create a new ERT_MUST_NOT_THROW region as a child of OUTER.  */

eh_region
gen_eh_region_must_not_throw (eh_region outer)
{
  return gen_eh_region (ERT_MUST_NOT_THROW, outer);
}
488
489eh_landing_pad
490gen_eh_landing_pad (eh_region region)
491{
492  eh_landing_pad lp = ggc_cleared_alloc<eh_landing_pad_d> ();
493
494  lp->next_lp = region->landing_pads;
495  lp->region = region;
496  lp->index = vec_safe_length (cfun->eh->lp_array);
497  region->landing_pads = lp;
498
499  vec_safe_push (cfun->eh->lp_array, lp);
500
501  return lp;
502}
503
/* Return the EH region with index I in function IFUN.  */

eh_region
get_eh_region_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->region_array)[i];
}
509
/* Return the EH region with index I in the current function.  */

eh_region
get_eh_region_from_number (int i)
{
  return get_eh_region_from_number_fn (cfun, i);
}
515
/* Return the landing pad with index I in function IFUN.  */

eh_landing_pad
get_eh_landing_pad_from_number_fn (struct function *ifun, int i)
{
  return (*ifun->eh->lp_array)[i];
}
521
/* Return the landing pad with index I in the current function.  */

eh_landing_pad
get_eh_landing_pad_from_number (int i)
{
  return get_eh_landing_pad_from_number_fn (cfun, i);
}
527
528eh_region
529get_eh_region_from_lp_number_fn (struct function *ifun, int i)
530{
531  if (i < 0)
532    return (*ifun->eh->region_array)[-i];
533  else if (i == 0)
534    return NULL;
535  else
536    {
537      eh_landing_pad lp;
538      lp = (*ifun->eh->lp_array)[i];
539      return lp->region;
540    }
541}
542
/* As get_eh_region_from_lp_number_fn, for the current function.  */

eh_region
get_eh_region_from_lp_number (int i)
{
  return get_eh_region_from_lp_number_fn (cfun, i);
}
548
/* Returns true if the current function has exception handling regions.  */

bool
current_function_has_exception_handlers (void)
{
  /* The region tree is non-NULL exactly when regions have been created.  */
  return cfun->eh->region_tree != NULL;
}
556
/* State shared by duplicate_eh_regions and its recursive worker
   duplicate_eh_regions_1: the label-remapping callback with its
   closure data, and the map from old regions/landing pads to their
   copies.  (An earlier LP_OFFSET parameter no longer exists.)  */

struct duplicate_eh_regions_data
{
  duplicate_eh_regions_map label_map;	/* Callback remapping labels.  */
  void *label_map_data;			/* Opaque data for LABEL_MAP.  */
  hash_map<void *, void *> *eh_map;	/* old region/lp -> new copy.  */
};
566
567static void
568duplicate_eh_regions_1 (struct duplicate_eh_regions_data *data,
569			eh_region old_r, eh_region outer)
570{
571  eh_landing_pad old_lp, new_lp;
572  eh_region new_r;
573
574  new_r = gen_eh_region (old_r->type, outer);
575  gcc_assert (!data->eh_map->put (old_r, new_r));
576
577  switch (old_r->type)
578    {
579    case ERT_CLEANUP:
580      break;
581
582    case ERT_TRY:
583      {
584	eh_catch oc, nc;
585	for (oc = old_r->u.eh_try.first_catch; oc ; oc = oc->next_catch)
586	  {
587	    /* We should be doing all our region duplication before and
588	       during inlining, which is before filter lists are created.  */
589	    gcc_assert (oc->filter_list == NULL);
590	    nc = gen_eh_region_catch (new_r, oc->type_list);
591	    nc->label = data->label_map (oc->label, data->label_map_data);
592	  }
593      }
594      break;
595
596    case ERT_ALLOWED_EXCEPTIONS:
597      new_r->u.allowed.type_list = old_r->u.allowed.type_list;
598      if (old_r->u.allowed.label)
599	new_r->u.allowed.label
600	    = data->label_map (old_r->u.allowed.label, data->label_map_data);
601      else
602	new_r->u.allowed.label = NULL_TREE;
603      break;
604
605    case ERT_MUST_NOT_THROW:
606      new_r->u.must_not_throw.failure_loc =
607	LOCATION_LOCUS (old_r->u.must_not_throw.failure_loc);
608      new_r->u.must_not_throw.failure_decl =
609	old_r->u.must_not_throw.failure_decl;
610      break;
611    }
612
613  for (old_lp = old_r->landing_pads; old_lp ; old_lp = old_lp->next_lp)
614    {
615      /* Don't bother copying unused landing pads.  */
616      if (old_lp->post_landing_pad == NULL)
617	continue;
618
619      new_lp = gen_eh_landing_pad (new_r);
620      gcc_assert (!data->eh_map->put (old_lp, new_lp));
621
622      new_lp->post_landing_pad
623	= data->label_map (old_lp->post_landing_pad, data->label_map_data);
624      EH_LANDING_PAD_NR (new_lp->post_landing_pad) = new_lp->index;
625    }
626
627  /* Make sure to preserve the original use of __cxa_end_cleanup.  */
628  new_r->use_cxa_end_cleanup = old_r->use_cxa_end_cleanup;
629
630  for (old_r = old_r->inner; old_r ; old_r = old_r->next_peer)
631    duplicate_eh_regions_1 (data, old_r, new_r);
632}
633
634/* Duplicate the EH regions from IFUN rooted at COPY_REGION into
635   the current function and root the tree below OUTER_REGION.
636   The special case of COPY_REGION of NULL means all regions.
637   Remap labels using MAP/MAP_DATA callback.  Return a pointer map
638   that allows the caller to remap uses of both EH regions and
639   EH landing pads.  */
640
hash_map<void *, void *> *
duplicate_eh_regions (struct function *ifun,
		      eh_region copy_region, int outer_lp,
		      duplicate_eh_regions_map map, void *map_data)
{
  struct duplicate_eh_regions_data data;
  eh_region outer_region;

#ifdef ENABLE_CHECKING
  verify_eh_tree (ifun);
#endif

  data.label_map = map;
  data.label_map_data = map_data;
  /* Allocated with new; the caller is responsible for deleting the
     returned map (nothing in this function frees it).  */
  data.eh_map = new hash_map<void *, void *>;

  /* OUTER_LP encodes the destination parent; see
     get_eh_region_from_lp_number for the encoding.  */
  outer_region = get_eh_region_from_lp_number (outer_lp);

  /* Copy all the regions in the subtree.  */
  if (copy_region)
    duplicate_eh_regions_1 (&data, copy_region, outer_region);
  else
    {
      eh_region r;
      for (r = ifun->eh->region_tree; r ; r = r->next_peer)
	duplicate_eh_regions_1 (&data, r, outer_region);
    }

#ifdef ENABLE_CHECKING
  verify_eh_tree (cfun);
#endif

  return data.eh_map;
}
675
676/* Return the region that is outer to both REGION_A and REGION_B in IFUN.  */
677
678eh_region
679eh_region_outermost (struct function *ifun, eh_region region_a,
680		     eh_region region_b)
681{
682  sbitmap b_outer;
683
684  gcc_assert (ifun->eh->region_array);
685  gcc_assert (ifun->eh->region_tree);
686
687  b_outer = sbitmap_alloc (ifun->eh->region_array->length ());
688  bitmap_clear (b_outer);
689
690  do
691    {
692      bitmap_set_bit (b_outer, region_b->index);
693      region_b = region_b->outer;
694    }
695  while (region_b);
696
697  do
698    {
699      if (bitmap_bit_p (b_outer, region_a->index))
700	break;
701      region_a = region_a->outer;
702    }
703  while (region_a);
704
705  sbitmap_free (b_outer);
706  return region_a;
707}
708
709void
710add_type_for_runtime (tree type)
711{
712  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
713  if (TREE_CODE (type) == NOP_EXPR)
714    return;
715
716  bool existed = false;
717  tree *slot = &type_to_runtime_map->get_or_insert (type, &existed);
718  if (!existed)
719    *slot = lang_hooks.eh_runtime_type (type);
720}
721
/* Return the runtime representation of TYPE previously recorded by
   add_type_for_runtime.  TYPE must already be present in the map.  */

tree
lookup_type_for_runtime (tree type)
{
  /* If TYPE is NOP_EXPR, it means that it already is a runtime type.  */
  if (TREE_CODE (type) == NOP_EXPR)
    return type;

  /* We should have always inserted the data earlier.  */
  return *type_to_runtime_map->get (type);
}
732
733
/* Represent an entry in @TTypes for either catch actions
   or exception filter actions.  */
struct ttypes_filter {
  tree t;	/* The type, or for ehspec entries the TREE_LIST of types.  */
  int filter;	/* Assigned filter value: a positive 1-based index for
		   ttype entries, negative for exception specifications
		   (see add_ttypes_entry / add_ehspec_entry).  */
};
740
/* Helper for ttypes_filter hashing.  Entries are keyed by the single
   tree in T; they are malloc'd, hence typed_free_remove.  */

struct ttypes_filter_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter value_type;
  typedef tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
750
/* Compare ENTRY (a ttypes_filter entry in the hash table) with DATA
   (a tree) for a @TTypes type node we are thinking about adding.
   Pointer identity of the tree node is sufficient.  */

inline bool
ttypes_filter_hasher::equal (const value_type *entry, const compare_type *data)
{
  return entry->t == data;
}
759
/* Hash a ttypes_filter entry by the hash of its tree.  */

inline hashval_t
ttypes_filter_hasher::hash (const value_type *entry)
{
  return TREE_HASH (entry->t);
}
765
766typedef hash_table<ttypes_filter_hasher> ttypes_hash_type;
767
768
/* Helper for ehspec hashing.  Reuses ttypes_filter entries, but the
   key here is the whole TREE_LIST of types in T rather than a single
   type.  */

struct ehspec_hasher : typed_free_remove <ttypes_filter>
{
  typedef ttypes_filter value_type;
  typedef ttypes_filter compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
778
/* Compare ENTRY with DATA (both struct ttypes_filter) for a @TTypes
   exception specification list we are thinking about adding.  */
/* ??? Currently we use the type lists in the order given.  Someone
   should put these in some canonical order.  */

inline bool
ehspec_hasher::equal (const value_type *entry, const compare_type *data)
{
  return type_list_equal (entry->t, data->t);
}
789
790/* Hash function for exception specification lists.  */
791
792inline hashval_t
793ehspec_hasher::hash (const value_type *entry)
794{
795  hashval_t h = 0;
796  tree list;
797
798  for (list = entry->t; list ; list = TREE_CHAIN (list))
799    h = (h << 5) + (h >> 27) + TREE_HASH (TREE_VALUE (list));
800  return h;
801}
802
803typedef hash_table<ehspec_hasher> ehspec_hash_type;
804
805
/* Add TYPE (which may be NULL) to cfun->eh->ttype_data, using TYPES_HASH
   to speed up the search.  Return the filter value to be used.
   Filter values are 1-based indices into the ttype_data vector; a
   repeated TYPE reuses the value assigned on first insertion.  */

static int
add_ttypes_entry (ttypes_hash_type *ttypes_hash, tree type)
{
  struct ttypes_filter **slot, *n;

  slot = ttypes_hash->find_slot_with_hash (type, (hashval_t) TREE_HASH (type),
					  INSERT);

  if ((n = *slot) == NULL)
    {
      /* Filter value is a 1 based table index.  */

      n = XNEW (struct ttypes_filter);
      n->t = type;
      n->filter = vec_safe_length (cfun->eh->ttype_data) + 1;
      *slot = n;

      vec_safe_push (cfun->eh->ttype_data, type);
    }

  return n->filter;
}
831
/* Add LIST to cfun->eh->ehspec_data, using EHSPEC_HASH and TYPES_HASH
   to speed up the search.  Return the filter value to be used: a
   negative, -1 based index into the ehspec data buffer.  On the ARM
   EABI unwinder the buffer holds the type nodes themselves; otherwise
   it holds each type's ttype filter encoded as a uleb128.  Either way
   the entry is zero-terminated.  */

static int
add_ehspec_entry (ehspec_hash_type *ehspec_hash, ttypes_hash_type *ttypes_hash,
		  tree list)
{
  struct ttypes_filter **slot, *n;
  struct ttypes_filter dummy;

  /* Probe with a stack dummy keyed by LIST.  */
  dummy.t = list;
  slot = ehspec_hash->find_slot (&dummy, INSERT);

  if ((n = *slot) == NULL)
    {
      int len;

      if (targetm.arm_eabi_unwinder)
	len = vec_safe_length (cfun->eh->ehspec_data.arm_eabi);
      else
	len = vec_safe_length (cfun->eh->ehspec_data.other);

      /* Filter value is a -1 based byte index into a uleb128 buffer.  */

      n = XNEW (struct ttypes_filter);
      n->t = list;
      n->filter = -(len + 1);
      *slot = n;

      /* Generate a 0 terminated list of filter values.  */
      for (; list ; list = TREE_CHAIN (list))
	{
	  if (targetm.arm_eabi_unwinder)
	    vec_safe_push (cfun->eh->ehspec_data.arm_eabi, TREE_VALUE (list));
	  else
	    {
	      /* Look up each type in the list and encode its filter
		 value as a uleb128.  */
	      push_uleb128 (&cfun->eh->ehspec_data.other,
			    add_ttypes_entry (ttypes_hash, TREE_VALUE (list)));
	    }
	}
      if (targetm.arm_eabi_unwinder)
	vec_safe_push (cfun->eh->ehspec_data.arm_eabi, NULL_TREE);
      else
	vec_safe_push (cfun->eh->ehspec_data.other, (uchar)0);
    }

  return n->filter;
}
882
/* Generate the action filter values to be used for CATCH and
   ALLOWED_EXCEPTIONS regions.  When using dwarf2 exception regions,
   we use lots of landing pads, and so every type or list can share
   the same filter value, which saves table space.  */

void
assign_filter_values (void)
{
  int i;
  eh_region r;
  eh_catch c;

  /* Allocate the per-function ttype and ehspec buffers that
     add_ttypes_entry / add_ehspec_entry append to.  */
  vec_alloc (cfun->eh->ttype_data, 16);
  if (targetm.arm_eabi_unwinder)
    vec_alloc (cfun->eh->ehspec_data.arm_eabi, 64);
  else
    vec_alloc (cfun->eh->ehspec_data.other, 64);

  /* Local hash tables dedupe types and spec lists for this function.  */
  ehspec_hash_type ehspec (31);
  ttypes_hash_type ttypes (31);

  /* Walk every region (slot 0 is the reserved null entry).  */
  for (i = 1; vec_safe_iterate (cfun->eh->region_array, i, &r); ++i)
    {
      if (r == NULL)
	continue;

      switch (r->type)
	{
	case ERT_TRY:
	  for (c = r->u.eh_try.first_catch; c ; c = c->next_catch)
	    {
	      /* Whatever type_list is (NULL or true list), we build a list
		 of filters for the region.  */
	      c->filter_list = NULL_TREE;

	      if (c->type_list != NULL)
		{
		  /* Get a filter value for each of the types caught and store
		     them in the region's dedicated list.  */
		  tree tp_node = c->type_list;

		  for ( ; tp_node; tp_node = TREE_CHAIN (tp_node))
		    {
		      int flt
		       	= add_ttypes_entry (&ttypes, TREE_VALUE (tp_node));
		      tree flt_node = build_int_cst (integer_type_node, flt);

		      c->filter_list
			= tree_cons (NULL_TREE, flt_node, c->filter_list);
		    }
		}
	      else
		{
		  /* Get a filter value for the NULL list also since it
		     will need an action record anyway.  */
		  int flt = add_ttypes_entry (&ttypes, NULL);
		  tree flt_node = build_int_cst (integer_type_node, flt);

		  c->filter_list
		    = tree_cons (NULL_TREE, flt_node, NULL);
		}
	    }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  r->u.allowed.filter
	    = add_ehspec_entry (&ehspec, &ttypes, r->u.allowed.type_list);
	  break;

	default:
	  break;
	}
    }
}
957
/* Emit SEQ into a new basic block just before INSN (that is assumed to
   be the first instruction of some existing BB) and return the newly
   produced block.  */
static basic_block
emit_to_new_bb_before (rtx_insn *seq, rtx insn)
{
  rtx_insn *last;
  basic_block bb;
  edge e;
  edge_iterator ei;

  /* If there happens to be a fallthru edge (possibly created by cleanup_cfg
     call), we don't want it to go into newly created landing pad or other EH
     construct.  */
  for (ei = ei_start (BLOCK_FOR_INSN (insn)->preds); (e = ei_safe_edge (ei)); )
    if (e->flags & EDGE_FALLTHRU)
      force_nonfallthru (e);
    else
      ei_next (&ei);
  last = emit_insn_before (seq, insn);
  /* Don't let a trailing barrier become part of the new block.  */
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  bb = create_basic_block (seq, last, BLOCK_FOR_INSN (insn)->prev_bb);
  update_bb_for_insn (bb);
  bb->flags |= BB_SUPERBLOCK;
  return bb;
}
985
/* A subroutine of dw2_build_landing_pads, also used for edge splitting
   at the rtl level.  Emit the code required by the target at a landing
   pad for the given region: the target's receiver pattern if it has
   one, followed by copies of the exception pointer and filter value
   out of the EH return data registers into the region's pseudos.  */

void
expand_dw2_landing_pad_for_region (eh_region region)
{
#ifdef HAVE_exception_receiver
  if (HAVE_exception_receiver)
    emit_insn (gen_exception_receiver ());
  else
#endif
#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
  else
#endif
    { /* Nothing */ }

  /* EH return data register 0 carries the exception pointer,
     register 1 the filter value.  */
  if (region->exc_ptr_reg)
    emit_move_insn (region->exc_ptr_reg,
		    gen_rtx_REG (ptr_mode, EH_RETURN_DATA_REGNO (0)));
  if (region->filter_reg)
    emit_move_insn (region->filter_reg,
		    gen_rtx_REG (targetm.eh_return_filter_mode (),
				 EH_RETURN_DATA_REGNO (1)));
}
1013
/* Expand the extra code needed at landing pads for dwarf2 unwinding.  */

static void
dw2_build_landing_pads (void)
{
  int i;
  eh_landing_pad lp;
  int e_flags = EDGE_FALLTHRU;

  /* If we're going to partition blocks, we need to be able to add
     new landing pads later, which means that we need to hold on to
     the post-landing-pad block.  Prevent it from being merged away.
     We'll remove this bit after partitioning.  */
  if (flag_reorder_blocks_and_partition)
    e_flags |= EDGE_PRESERVE;

  /* Index 0 of lp_array is unused; real landing pads start at 1.  */
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    {
      basic_block bb;
      rtx_insn *seq;
      edge e;

      if (lp == NULL || lp->post_landing_pad == NULL)
	continue;

      start_sequence ();

      lp->landing_pad = gen_label_rtx ();
      emit_label (lp->landing_pad);
      /* The unwinder jumps here; keep the label even though it looks
	 unreferenced within the function.  */
      LABEL_PRESERVE_P (lp->landing_pad) = 1;

      expand_dw2_landing_pad_for_region (lp->region);

      seq = get_insns ();
      end_sequence ();

      /* Place the landing pad in a new block that falls through to the
	 post-landing-pad block.  */
      bb = emit_to_new_bb_before (seq, label_rtx (lp->post_landing_pad));
      e = make_edge (bb, bb->next_bb, e_flags);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
}
1066
1067
/* Map from landing-pad index to the call-site value assigned by
   sjlj_assign_call_site_values and consumed by sjlj_mark_call_sites.
   Allocated and released by sjlj_build_landing_pads.  */
static vec<int> sjlj_lp_call_site_index;
1069
/* Process all active landing pads.  Assign each one a compact dispatch
   index, and a call-site index.  Returns the number of dispatch indexes
   assigned, i.e. the number of active landing pads.  */

static int
sjlj_assign_call_site_values (void)
{
  action_hash_type ar_hash (31);
  int i, disp_index;
  eh_landing_pad lp;

  vec_alloc (crtl->eh.action_record_data, 64);

  disp_index = 0;
  call_site_base = 1;
  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	int action, call_site;

	/* First: build the action table.  */
	action = collect_one_action_chain (&ar_hash, lp->region);

	/* Next: assign call-site values.  In dwarf2 terms, this would be
	   the region number assigned by convert_to_eh_region_ranges, but
	   handles no-action and must-not-throw differently.  */
	/* Map must-not-throw to otherwise unused call-site index 0.  */
	if (action == -2)
	  call_site = 0;
	/* Map no-action to otherwise unused call-site index -1.  */
	else if (action == -1)
	  call_site = -1;
	/* Otherwise, look it up in the table.  */
	else
	  call_site = add_call_site (GEN_INT (disp_index), action, 0);
	sjlj_lp_call_site_index[i] = call_site;

	disp_index++;
      }

  return disp_index;
}
1111
/* Emit code to record the current call-site index before every
   insn that can throw.  */

static void
sjlj_mark_call_sites (void)
{
  int last_call_site = -2;
  rtx_insn *insn;
  rtx mem;

  for (insn = get_insns (); insn ; insn = NEXT_INSN (insn))
    {
      eh_landing_pad lp;
      eh_region r;
      bool nothrow;
      int this_call_site;
      rtx_insn *before, *p;

      /* Reset value tracking at extended basic block boundaries.  */
      if (LABEL_P (insn))
	last_call_site = -2;

      if (! INSN_P (insn))
	continue;

      nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
      if (nothrow)
	continue;
      if (lp)
	this_call_site = sjlj_lp_call_site_index[lp->index];
      else if (r == NULL)
	{
	  /* Calls (and trapping insns) without notes are outside any
	     exception handling region in this function.  Mark them as
	     no action.  */
	  this_call_site = -1;
	}
      else
	{
	  gcc_assert (r->type == ERT_MUST_NOT_THROW);
	  this_call_site = 0;
	}

      if (this_call_site != -1)
	crtl->uses_eh_lsda = 1;

      /* Skip the store when the recorded value is already current.  */
      if (this_call_site == last_call_site)
	continue;

      /* Don't separate a call from its argument loads.  */
      before = insn;
      if (CALL_P (insn))
	before = find_first_parameter_load (insn, NULL);

      start_sequence ();
      mem = adjust_address (crtl->eh.sjlj_fc, TYPE_MODE (integer_type_node),
			    sjlj_fc_call_site_ofs);
      emit_move_insn (mem, gen_int_mode (this_call_site, GET_MODE (mem)));
      p = get_insns ();
      end_sequence ();

      emit_insn_before (p, before);
      last_call_site = this_call_site;
    }
}
1177
/* Construct the SjLj_Function_Context.  DISPATCH_LABEL is the label the
   unwinder transfers to, or NULL when the function only needs to register
   a context (e.g. for must-not-throw regions with no landing pads).  */

static void
sjlj_emit_function_enter (rtx_code_label *dispatch_label)
{
  rtx_insn *fn_begin, *seq;
  rtx fc, mem;
  bool fn_begin_outside_block;
  rtx personality = get_personality_function (current_function_decl);

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  /* We're storing this libcall's address into memory instead of
     calling it directly.  Thus, we must call assemble_external_libcall
     here, as we can not depend on emit_library_call to do it for us.  */
  assemble_external_libcall (personality);
  mem = adjust_address (fc, Pmode, sjlj_fc_personality_ofs);
  emit_move_insn (mem, personality);

  /* Store the LSDA address, or 0 if this function has none.  */
  mem = adjust_address (fc, Pmode, sjlj_fc_lsda_ofs);
  if (crtl->uses_eh_lsda)
    {
      char buf[20];
      rtx sym;

      ASM_GENERATE_INTERNAL_LABEL (buf, "LLSDA", current_function_funcdef_no);
      sym = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (buf));
      SYMBOL_REF_FLAGS (sym) = SYMBOL_FLAG_LOCAL;
      emit_move_insn (mem, sym);
    }
  else
    emit_move_insn (mem, const0_rtx);

  if (dispatch_label)
    {
#ifdef DONT_USE_BUILTIN_SETJMP
      rtx x;
      x = emit_library_call_value (setjmp_libfunc, NULL_RTX, LCT_RETURNS_TWICE,
				   TYPE_MODE (integer_type_node), 1,
				   plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs), Pmode);

      emit_cmp_and_jump_insns (x, const0_rtx, NE, 0,
			       TYPE_MODE (integer_type_node), 0,
			       dispatch_label, REG_BR_PROB_BASE / 100);
#else
      expand_builtin_setjmp_setup (plus_constant (Pmode, XEXP (fc, 0),
						  sjlj_fc_jbuf_ofs),
				   dispatch_label);
#endif
    }

  emit_library_call (unwind_sjlj_register_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Instead of doing this at the beginning of the function,
     do this in a block that is at loop level 0 and dominates all
     can_throw_internal instructions.  */

  /* Find NOTE_INSN_FUNCTION_BEG, noting whether any basic block note
     precedes it.  */
  fn_begin_outside_block = true;
  for (fn_begin = get_insns (); ; fn_begin = NEXT_INSN (fn_begin))
    if (NOTE_P (fn_begin))
      {
	if (NOTE_KIND (fn_begin) == NOTE_INSN_FUNCTION_BEG)
	  break;
	else if (NOTE_INSN_BASIC_BLOCK_P (fn_begin))
	  fn_begin_outside_block = false;
      }

  if (fn_begin_outside_block)
    insert_insn_on_edge (seq, single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
  else
    emit_insn_after (seq, fn_begin);
}
1257
/* Call back from expand_function_end to know where we should put
   the call to unwind_sjlj_unregister_libfunc if needed.  */

void
sjlj_emit_function_exit_after (rtx_insn *after)
{
  /* Just record the position; sjlj_emit_function_exit emits the
     unregister call after it.  */
  crtl->eh.sjlj_exit_after = after;
}
1266
/* Emit the call that unregisters this function's SjLj_Function_Context,
   at the position recorded by sjlj_emit_function_exit_after.  */

static void
sjlj_emit_function_exit (void)
{
  rtx_insn *seq, *insn;

  start_sequence ();

  emit_library_call (unwind_sjlj_unregister_libfunc, LCT_NORMAL, VOIDmode,
		     1, XEXP (crtl->eh.sjlj_fc, 0), Pmode);

  seq = get_insns ();
  end_sequence ();

  /* ??? Really this can be done in any block at loop level 0 that
     post-dominates all can_throw_internal instructions.  This is
     the last possible moment.  */

  insn = crtl->eh.sjlj_exit_after;
  /* If the recorded position is a label, step past it so the call is
     emitted inside the block that label begins.  */
  if (LABEL_P (insn))
    insn = NEXT_INSN (insn);

  emit_insn_after (seq, insn);
}
1290
/* Emit the dispatch code at DISPATCH_LABEL that routes an incoming SjLj
   unwind to one of the NUM_DISPATCH active landing pads, using the
   call-site value stored in the function context as the selector.  */

static void
sjlj_emit_dispatch_table (rtx_code_label *dispatch_label, int num_dispatch)
{
  machine_mode unwind_word_mode = targetm.unwind_word_mode ();
  machine_mode filter_mode = targetm.eh_return_filter_mode ();
  eh_landing_pad lp;
  rtx mem, fc, before, exc_ptr_reg, filter_reg;
  rtx_insn *seq;
  rtx first_reachable_label;
  basic_block bb;
  eh_region r;
  edge e;
  int i, disp_index;
  vec<tree> dispatch_labels = vNULL;

  fc = crtl->eh.sjlj_fc;

  start_sequence ();

  emit_label (dispatch_label);

#ifndef DONT_USE_BUILTIN_SETJMP
  expand_builtin_setjmp_receiver (dispatch_label);

  /* The caller of expand_builtin_setjmp_receiver is responsible for
     making sure that the label doesn't vanish.  The only other caller
     is the expander for __builtin_setjmp_receiver, which places this
     label on the nonlocal_goto_label list.  Since we're modeling these
     CFG edges more exactly, we can use the forced_labels list instead.  */
  LABEL_PRESERVE_P (dispatch_label) = 1;
  forced_labels
    = gen_rtx_INSN_LIST (VOIDmode, dispatch_label, forced_labels);
#endif

  /* Load up exc_ptr and filter values from the function context.  */
  mem = adjust_address (fc, unwind_word_mode, sjlj_fc_data_ofs);
  if (unwind_word_mode != ptr_mode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      mem = convert_memory_address (ptr_mode, mem);
#else
      mem = convert_to_mode (ptr_mode, mem, 0);
#endif
    }
  exc_ptr_reg = force_reg (ptr_mode, mem);

  mem = adjust_address (fc, unwind_word_mode,
			sjlj_fc_data_ofs + GET_MODE_SIZE (unwind_word_mode));
  if (unwind_word_mode != filter_mode)
    mem = convert_to_mode (filter_mode, mem, 0);
  filter_reg = force_reg (filter_mode, mem);

  /* Jump to one of the directly reachable regions.  */

  disp_index = 0;
  first_reachable_label = NULL;

  /* If there's exactly one call site in the function, don't bother
     generating a switch statement.  */
  if (num_dispatch > 1)
    dispatch_labels.create (num_dispatch);

  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
    if (lp && lp->post_landing_pad)
      {
	rtx_insn *seq2;
	rtx label;

	start_sequence ();

	/* All landing pads share the single dispatch label.  */
	lp->landing_pad = dispatch_label;

	if (num_dispatch > 1)
	  {
	    tree t_label, case_elt, t;

	    /* Build a switch case mapping this dispatch index to a
	       fresh label for this landing pad.  */
	    t_label = create_artificial_label (UNKNOWN_LOCATION);
	    t = build_int_cst (integer_type_node, disp_index);
	    case_elt = build_case_label (t, NULL, t_label);
	    dispatch_labels.quick_push (case_elt);
	    label = label_rtx (t_label);
	  }
	else
	  label = gen_label_rtx ();

	if (disp_index == 0)
	  first_reachable_label = label;
	emit_label (label);

	/* Propagate the unwinder-provided values into the region's
	   exc_ptr/filter pseudos.  */
	r = lp->region;
	if (r->exc_ptr_reg)
	  emit_move_insn (r->exc_ptr_reg, exc_ptr_reg);
	if (r->filter_reg)
	  emit_move_insn (r->filter_reg, filter_reg);

	seq2 = get_insns ();
	end_sequence ();

	before = label_rtx (lp->post_landing_pad);
	bb = emit_to_new_bb_before (seq2, before);
	e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
	e->count = bb->count;
	e->probability = REG_BR_PROB_BASE;
	if (current_loops)
	  {
	    struct loop *loop = bb->next_bb->loop_father;
	    /* If we created a pre-header block, add the new block to the
	       outer loop, otherwise to the loop itself.  */
	    if (bb->next_bb == loop->header)
	      add_bb_to_loop (bb, loop_outer (loop));
	    else
	      add_bb_to_loop (bb, loop);
	    /* ???  For multiple dispatches we will end up with edges
	       from the loop tree root into this loop, making it a
	       multiple-entry loop.  Discard all affected loops.  */
	    if (num_dispatch > 1)
	      {
		for (loop = bb->loop_father;
		     loop_outer (loop); loop = loop_outer (loop))
		  mark_loop_for_removal (loop);
	      }
	  }

	disp_index++;
      }
  gcc_assert (disp_index == num_dispatch);

  if (num_dispatch > 1)
    {
      rtx disp = adjust_address (fc, TYPE_MODE (integer_type_node),
				 sjlj_fc_call_site_ofs);
      expand_sjlj_dispatch_table (disp, dispatch_labels);
    }

  seq = get_insns ();
  end_sequence ();

  bb = emit_to_new_bb_before (seq, first_reachable_label);
  if (num_dispatch == 1)
    {
      e = make_edge (bb, bb->next_bb, EDGE_FALLTHRU);
      e->count = bb->count;
      e->probability = REG_BR_PROB_BASE;
      if (current_loops)
	{
	  struct loop *loop = bb->next_bb->loop_father;
	  /* If we created a pre-header block, add the new block to the
	     outer loop, otherwise to the loop itself.  */
	  if (bb->next_bb == loop->header)
	    add_bb_to_loop (bb, loop_outer (loop));
	  else
	    add_bb_to_loop (bb, loop);
	}
    }
  else
    {
      /* We are not wiring up edges here, but as the dispatcher call
         is at function begin simply associate the block with the
	 outermost (non-)loop.  */
      if (current_loops)
	add_bb_to_loop (bb, current_loops->tree_root);
    }
}
1454
/* Main entry point for the SjLj scheme: assign call-site values, mark
   call sites, and emit the function context setup/teardown and the
   dispatch table.  */

static void
sjlj_build_landing_pads (void)
{
  int num_dispatch;

  num_dispatch = vec_safe_length (cfun->eh->lp_array);
  if (num_dispatch == 0)
    return;
  sjlj_lp_call_site_index.safe_grow_cleared (num_dispatch);

  /* From here on NUM_DISPATCH is the number of *active* landing pads,
     not the length of lp_array.  */
  num_dispatch = sjlj_assign_call_site_values ();
  if (num_dispatch > 0)
    {
      rtx_code_label *dispatch_label = gen_label_rtx ();
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      sjlj_emit_function_enter (dispatch_label);
      sjlj_emit_dispatch_table (dispatch_label, num_dispatch);
      sjlj_emit_function_exit ();
    }

  /* If we do not have any landing pads, we may still need to register a
     personality routine and (empty) LSDA to handle must-not-throw regions.  */
  else if (function_needs_eh_personality (cfun) != eh_personality_none)
    {
      int align = STACK_SLOT_ALIGNMENT (sjlj_fc_type_node,
					TYPE_MODE (sjlj_fc_type_node),
					TYPE_ALIGN (sjlj_fc_type_node));
      crtl->eh.sjlj_fc
	= assign_stack_local (TYPE_MODE (sjlj_fc_type_node),
			      int_size_in_bytes (sjlj_fc_type_node),
			      align);

      sjlj_mark_call_sites ();
      /* NULL dispatch label: register the context without any receiver.  */
      sjlj_emit_function_enter (NULL);
      sjlj_emit_function_exit ();
    }

  sjlj_lp_call_site_index.release ();
}
1502
/* After initial rtl generation, call back to finish generating
   exception support code.  */

void
finish_eh_generation (void)
{
  basic_block bb;

  /* Construct the landing pads.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    sjlj_build_landing_pads ();
  else
    dw2_build_landing_pads ();
  /* Undo the BB_SUPERBLOCK marking done by emit_to_new_bb_before.  */
  break_superblocks ();

  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ
      /* Kludge for Alpha (see alpha_gp_save_rtx).  */
      || single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun))->insns.r)
    commit_edge_insertions ();

  /* Redirect all EH edges from the post_landing_pad to the landing pad.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      eh_landing_pad lp;
      edge_iterator ei;
      edge e;

      lp = get_eh_landing_pad_from_rtx (BB_END (bb));

      /* Find the EH edge out of this block, if any.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	if (e->flags & EDGE_EH)
	  break;

      /* We should not have generated any new throwing insns during this
	 pass, and we should not have lost any EH edges, so we only need
	 to handle two cases here:
	 (1) reachable handler and an existing edge to post-landing-pad,
	 (2) no reachable handler and no edge.  */
      gcc_assert ((lp != NULL) == (e != NULL));
      if (lp != NULL)
	{
	  gcc_assert (BB_HEAD (e->dest) == label_rtx (lp->post_landing_pad));

	  redirect_edge_succ (e, BLOCK_FOR_INSN (lp->landing_pad));
	  /* EH edges from calls are abnormal call edges.  */
	  e->flags |= (CALL_P (BB_END (bb))
		       ? EDGE_ABNORMAL | EDGE_ABNORMAL_CALL
		       : EDGE_ABNORMAL);
	}
    }
}
1553
1554/* This section handles removing dead code for flow.  */
1555
1556void
1557remove_eh_landing_pad (eh_landing_pad lp)
1558{
1559  eh_landing_pad *pp;
1560
1561  for (pp = &lp->region->landing_pads; *pp != lp; pp = &(*pp)->next_lp)
1562    continue;
1563  *pp = lp->next_lp;
1564
1565  if (lp->post_landing_pad)
1566    EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
1567  (*cfun->eh->lp_array)[lp->index] = NULL;
1568}
1569
/* Splice the EH region at PP from the region tree.  */

static void
remove_eh_handler_splicer (eh_region *pp)
{
  eh_region region = *pp;
  eh_landing_pad lp;

  /* First drop all of the region's landing pads.  */
  for (lp = region->landing_pads; lp ; lp = lp->next_lp)
    {
      if (lp->post_landing_pad)
	EH_LANDING_PAD_NR (lp->post_landing_pad) = 0;
      (*cfun->eh->lp_array)[lp->index] = NULL;
    }

  /* Re-parent REGION's children to its outer region, splicing them
     into the peer chain where REGION used to be.  */
  if (region->inner)
    {
      eh_region p, outer;
      outer = region->outer;

      *pp = p = region->inner;
      do
	{
	  p->outer = outer;
	  pp = &p->next_peer;
	  p = *pp;
	}
      while (p);
    }
  /* Link the remaining peers past REGION.  */
  *pp = region->next_peer;

  (*cfun->eh->region_array)[region->index] = NULL;
}
1603
1604/* Splice a single EH region REGION from the region tree.
1605
1606   To unlink REGION, we need to find the pointer to it with a relatively
1607   expensive search in REGION's outer region.  If you are going to
1608   remove a number of handlers, using remove_unreachable_eh_regions may
1609   be a better option.  */
1610
1611void
1612remove_eh_handler (eh_region region)
1613{
1614  eh_region *pp, *pp_start, p, outer;
1615
1616  outer = region->outer;
1617  if (outer)
1618    pp_start = &outer->inner;
1619  else
1620    pp_start = &cfun->eh->region_tree;
1621  for (pp = pp_start, p = *pp; p != region; pp = &p->next_peer, p = *pp)
1622    continue;
1623
1624  remove_eh_handler_splicer (pp);
1625}
1626
1627/* Worker for remove_unreachable_eh_regions.
1628   PP is a pointer to the region to start a region tree depth-first
1629   search from.  R_REACHABLE is the set of regions that have to be
1630   preserved.  */
1631
1632static void
1633remove_unreachable_eh_regions_worker (eh_region *pp, sbitmap r_reachable)
1634{
1635  while (*pp)
1636    {
1637      eh_region region = *pp;
1638      remove_unreachable_eh_regions_worker (&region->inner, r_reachable);
1639      if (!bitmap_bit_p (r_reachable, region->index))
1640	remove_eh_handler_splicer (pp);
1641      else
1642	pp = &region->next_peer;
1643    }
1644}
1645
/* Splice all EH regions *not* marked in R_REACHABLE from the region tree.
   Do this by traversing the EH tree top-down and splice out regions that
   are not marked.  By removing regions from the leaves, we avoid costly
   searches in the region tree.  */

void
remove_unreachable_eh_regions (sbitmap r_reachable)
{
  /* The worker splices in place through the pointer to the tree root.  */
  remove_unreachable_eh_regions_worker (&cfun->eh->region_tree, r_reachable);
}
1656
1657/* Invokes CALLBACK for every exception handler landing pad label.
1658   Only used by reload hackery; should not be used by new code.  */
1659
1660void
1661for_each_eh_label (void (*callback) (rtx))
1662{
1663  eh_landing_pad lp;
1664  int i;
1665
1666  for (i = 1; vec_safe_iterate (cfun->eh->lp_array, i, &lp); ++i)
1667    {
1668      if (lp)
1669	{
1670	  rtx lab = lp->landing_pad;
1671	  if (lab && LABEL_P (lab))
1672	    (*callback) (lab);
1673	}
1674    }
1675}
1676
1677/* Create the REG_EH_REGION note for INSN, given its ECF_FLAGS for a
1678   call insn.
1679
1680   At the gimple level, we use LP_NR
1681       > 0 : The statement transfers to landing pad LP_NR
1682       = 0 : The statement is outside any EH region
1683       < 0 : The statement is within MUST_NOT_THROW region -LP_NR.
1684
1685   At the rtl level, we use LP_NR
1686       > 0 : The insn transfers to landing pad LP_NR
1687       = 0 : The insn cannot throw
1688       < 0 : The insn is within MUST_NOT_THROW region -LP_NR
1689       = INT_MIN : The insn cannot throw or execute a nonlocal-goto.
1690       missing note: The insn is outside any EH region.
1691
1692  ??? This difference probably ought to be avoided.  We could stand
1693  to record nothrow for arbitrary gimple statements, and so avoid
1694  some moderately complex lookups in stmt_could_throw_p.  Perhaps
1695  NOTHROW should be mapped on both sides to INT_MIN.  Perhaps the
1696  no-nonlocal-goto property should be recorded elsewhere as a bit
1697  on the call_insn directly.  Perhaps we should make more use of
1698  attaching the trees to call_insns (reachable via symbol_ref in
1699  direct call cases) and just pull the data out of the trees.  */
1700
1701void
1702make_reg_eh_region_note (rtx insn, int ecf_flags, int lp_nr)
1703{
1704  rtx value;
1705  if (ecf_flags & ECF_NOTHROW)
1706    value = const0_rtx;
1707  else if (lp_nr != 0)
1708    value = GEN_INT (lp_nr);
1709  else
1710    return;
1711  add_reg_note (insn, REG_EH_REGION, value);
1712}
1713
1714/* Create a REG_EH_REGION note for a CALL_INSN that cannot throw
1715   nor perform a non-local goto.  Replace the region note if it
1716   already exists.  */
1717
1718void
1719make_reg_eh_region_note_nothrow_nononlocal (rtx insn)
1720{
1721  rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1722  rtx intmin = GEN_INT (INT_MIN);
1723
1724  if (note != 0)
1725    XEXP (note, 0) = intmin;
1726  else
1727    add_reg_note (insn, REG_EH_REGION, intmin);
1728}
1729
1730/* Return true if INSN could throw, assuming no REG_EH_REGION note
1731   to the contrary.  */
1732
1733bool
1734insn_could_throw_p (const_rtx insn)
1735{
1736  if (!flag_exceptions)
1737    return false;
1738  if (CALL_P (insn))
1739    return true;
1740  if (INSN_P (insn) && cfun->can_throw_non_call_exceptions)
1741    return may_trap_p (PATTERN (insn));
1742  return false;
1743}
1744
1745/* Copy an REG_EH_REGION note to each insn that might throw beginning
1746   at FIRST and ending at LAST.  NOTE_OR_INSN is either the source insn
1747   to look for a note, or the note itself.  */
1748
1749void
1750copy_reg_eh_region_note_forward (rtx note_or_insn, rtx_insn *first, rtx last)
1751{
1752  rtx_insn *insn;
1753  rtx note = note_or_insn;
1754
1755  if (INSN_P (note_or_insn))
1756    {
1757      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1758      if (note == NULL)
1759	return;
1760    }
1761  note = XEXP (note, 0);
1762
1763  for (insn = first; insn != last ; insn = NEXT_INSN (insn))
1764    if (!find_reg_note (insn, REG_EH_REGION, NULL_RTX)
1765        && insn_could_throw_p (insn))
1766      add_reg_note (insn, REG_EH_REGION, note);
1767}
1768
1769/* Likewise, but iterate backward.  */
1770
1771void
1772copy_reg_eh_region_note_backward (rtx note_or_insn, rtx_insn *last, rtx first)
1773{
1774  rtx_insn *insn;
1775  rtx note = note_or_insn;
1776
1777  if (INSN_P (note_or_insn))
1778    {
1779      note = find_reg_note (note_or_insn, REG_EH_REGION, NULL_RTX);
1780      if (note == NULL)
1781	return;
1782    }
1783  note = XEXP (note, 0);
1784
1785  for (insn = last; insn != first; insn = PREV_INSN (insn))
1786    if (insn_could_throw_p (insn))
1787      add_reg_note (insn, REG_EH_REGION, note);
1788}
1789
1790
1791/* Extract all EH information from INSN.  Return true if the insn
1792   was marked NOTHROW.  */
1793
1794static bool
1795get_eh_region_and_lp_from_rtx (const_rtx insn, eh_region *pr,
1796			       eh_landing_pad *plp)
1797{
1798  eh_landing_pad lp = NULL;
1799  eh_region r = NULL;
1800  bool ret = false;
1801  rtx note;
1802  int lp_nr;
1803
1804  if (! INSN_P (insn))
1805    goto egress;
1806
1807  if (NONJUMP_INSN_P (insn)
1808      && GET_CODE (PATTERN (insn)) == SEQUENCE)
1809    insn = XVECEXP (PATTERN (insn), 0, 0);
1810
1811  note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1812  if (!note)
1813    {
1814      ret = !insn_could_throw_p (insn);
1815      goto egress;
1816    }
1817
1818  lp_nr = INTVAL (XEXP (note, 0));
1819  if (lp_nr == 0 || lp_nr == INT_MIN)
1820    {
1821      ret = true;
1822      goto egress;
1823    }
1824
1825  if (lp_nr < 0)
1826    r = (*cfun->eh->region_array)[-lp_nr];
1827  else
1828    {
1829      lp = (*cfun->eh->lp_array)[lp_nr];
1830      r = lp->region;
1831    }
1832
1833 egress:
1834  *plp = lp;
1835  *pr = r;
1836  return ret;
1837}
1838
1839/* Return the landing pad to which INSN may go, or NULL if it does not
1840   have a reachable landing pad within this function.  */
1841
1842eh_landing_pad
1843get_eh_landing_pad_from_rtx (const_rtx insn)
1844{
1845  eh_landing_pad lp;
1846  eh_region r;
1847
1848  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1849  return lp;
1850}
1851
1852/* Return the region to which INSN may go, or NULL if it does not
1853   have a reachable region within this function.  */
1854
1855eh_region
1856get_eh_region_from_rtx (const_rtx insn)
1857{
1858  eh_landing_pad lp;
1859  eh_region r;
1860
1861  get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1862  return r;
1863}
1864
1865/* Return true if INSN throws and is caught by something in this function.  */
1866
1867bool
1868can_throw_internal (const_rtx insn)
1869{
1870  return get_eh_landing_pad_from_rtx (insn) != NULL;
1871}
1872
1873/* Return true if INSN throws and escapes from the current function.  */
1874
1875bool
1876can_throw_external (const_rtx insn)
1877{
1878  eh_landing_pad lp;
1879  eh_region r;
1880  bool nothrow;
1881
1882  if (! INSN_P (insn))
1883    return false;
1884
1885  if (NONJUMP_INSN_P (insn)
1886      && GET_CODE (PATTERN (insn)) == SEQUENCE)
1887    {
1888      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1889      int i, n = seq->len ();
1890
1891      for (i = 0; i < n; i++)
1892	if (can_throw_external (seq->element (i)))
1893	  return true;
1894
1895      return false;
1896    }
1897
1898  nothrow = get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1899
1900  /* If we can't throw, we obviously can't throw external.  */
1901  if (nothrow)
1902    return false;
1903
1904  /* If we have an internal landing pad, then we're not external.  */
1905  if (lp != NULL)
1906    return false;
1907
1908  /* If we're not within an EH region, then we are external.  */
1909  if (r == NULL)
1910    return true;
1911
1912  /* The only thing that ought to be left is MUST_NOT_THROW regions,
1913     which don't always have landing pads.  */
1914  gcc_assert (r->type == ERT_MUST_NOT_THROW);
1915  return false;
1916}
1917
1918/* Return true if INSN cannot throw at all.  */
1919
1920bool
1921insn_nothrow_p (const_rtx insn)
1922{
1923  eh_landing_pad lp;
1924  eh_region r;
1925
1926  if (! INSN_P (insn))
1927    return true;
1928
1929  if (NONJUMP_INSN_P (insn)
1930      && GET_CODE (PATTERN (insn)) == SEQUENCE)
1931    {
1932      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
1933      int i, n = seq->len ();
1934
1935      for (i = 0; i < n; i++)
1936	if (!insn_nothrow_p (seq->element (i)))
1937	  return false;
1938
1939      return true;
1940    }
1941
1942  return get_eh_region_and_lp_from_rtx (insn, &r, &lp);
1943}
1944
1945/* Return true if INSN can perform a non-local goto.  */
1946/* ??? This test is here in this file because it (ab)uses REG_EH_REGION.  */
1947
1948bool
1949can_nonlocal_goto (const_rtx insn)
1950{
1951  if (nonlocal_goto_handler_labels && CALL_P (insn))
1952    {
1953      rtx note = find_reg_note (insn, REG_EH_REGION, NULL_RTX);
1954      if (!note || INTVAL (XEXP (note, 0)) != INT_MIN)
1955	return true;
1956    }
1957  return false;
1958}
1959
/* Set TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */

static unsigned int
set_nothrow_function_flags (void)
{
  rtx_insn *insn;

  crtl->nothrow = 1;

  /* Assume crtl->all_throwers_are_sibcalls until we encounter
     something that can throw an exception.  We specifically exempt
     CALL_INSNs that are SIBLING_CALL_P, as these are really jumps,
     and can't throw.  Most CALL_INSNs are not SIBLING_CALL_P, so this
     is optimistic.  */

  crtl->all_throwers_are_sibcalls = 1;

  /* If we don't know that this implementation of the function will
     actually be used, then we must not set TREE_NOTHROW, since
     callers must not assume that this function does not throw.  */
  if (TREE_NOTHROW (current_function_decl))
    return 0;

  if (! flag_exceptions)
    return 0;

  /* Scan the whole insn chain for anything that can escape.  */
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (can_throw_external (insn))
      {
        crtl->nothrow = 0;

	if (!CALL_P (insn) || !SIBLING_CALL_P (insn))
	  {
	    crtl->all_throwers_are_sibcalls = 0;
	    return 0;
	  }
      }

  /* Propagate the nothrow property to callers only when this body is
     known to be the one they will actually call.  */
  if (crtl->nothrow
      && (cgraph_node::get (current_function_decl)->get_availability ()
          >= AVAIL_AVAILABLE))
    {
      struct cgraph_node *node = cgraph_node::get (current_function_decl);
      struct cgraph_edge *e;
      for (e = node->callers; e; e = e->next_caller)
        e->can_throw_external = false;
      node->set_nothrow_flag (true);

      if (dump_file)
	fprintf (dump_file, "Marking function nothrow: %s\n\n",
		 current_function_name ());
    }
  return 0;
}
2014
2015namespace {
2016
/* Pass descriptor for the nothrow-marking RTL pass below.  */
const pass_data pass_data_set_nothrow_function_flags =
{
  RTL_PASS, /* type */
  "nothrow", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2029
/* RTL pass wrapper that runs set_nothrow_function_flags to compute
   TREE_NOTHROW and crtl->all_throwers_are_sibcalls.  */
class pass_set_nothrow_function_flags : public rtl_opt_pass
{
public:
  pass_set_nothrow_function_flags (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_set_nothrow_function_flags, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *)
    {
      return set_nothrow_function_flags ();
    }

}; // class pass_set_nothrow_function_flags
2044
2045} // anon namespace
2046
/* Factory function returning a new instance of the nothrow pass.  */

rtl_opt_pass *
make_pass_set_nothrow_function_flags (gcc::context *ctxt)
{
  return new pass_set_nothrow_function_flags (ctxt);
}
2052
2053
2054/* Various hooks for unwind library.  */
2055
/* Expand the EH support builtin functions:
   __builtin_eh_pointer and __builtin_eh_filter.

   Common subroutine: map REGION_NR_T, the builtin's constant
   region-number argument, to the corresponding eh_region structure
   of the current function.  */

static eh_region
expand_builtin_eh_common (tree region_nr_t)
{
  HOST_WIDE_INT region_nr;
  eh_region region;

  /* The region number argument is generated by the compiler itself,
     so it must be an integer constant that fits in a HOST_WIDE_INT.  */
  gcc_assert (tree_fits_shwi_p (region_nr_t));
  region_nr = tree_to_shwi (region_nr_t);

  region = (*cfun->eh->region_array)[region_nr];

  /* ??? We shouldn't have been able to delete a eh region without
     deleting all the code that depended on it.  */
  gcc_assert (region != NULL);

  return region;
}
2076
2077/* Expand to the exc_ptr value from the given eh region.  */
2078
2079rtx
2080expand_builtin_eh_pointer (tree exp)
2081{
2082  eh_region region
2083    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2084  if (region->exc_ptr_reg == NULL)
2085    region->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2086  return region->exc_ptr_reg;
2087}
2088
2089/* Expand to the filter value from the given eh region.  */
2090
2091rtx
2092expand_builtin_eh_filter (tree exp)
2093{
2094  eh_region region
2095    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2096  if (region->filter_reg == NULL)
2097    region->filter_reg = gen_reg_rtx (targetm.eh_return_filter_mode ());
2098  return region->filter_reg;
2099}
2100
2101/* Copy the exc_ptr and filter values from one landing pad's registers
2102   to another.  This is used to inline the resx statement.  */
2103
2104rtx
2105expand_builtin_eh_copy_values (tree exp)
2106{
2107  eh_region dst
2108    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 0));
2109  eh_region src
2110    = expand_builtin_eh_common (CALL_EXPR_ARG (exp, 1));
2111  machine_mode fmode = targetm.eh_return_filter_mode ();
2112
2113  if (dst->exc_ptr_reg == NULL)
2114    dst->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2115  if (src->exc_ptr_reg == NULL)
2116    src->exc_ptr_reg = gen_reg_rtx (ptr_mode);
2117
2118  if (dst->filter_reg == NULL)
2119    dst->filter_reg = gen_reg_rtx (fmode);
2120  if (src->filter_reg == NULL)
2121    src->filter_reg = gen_reg_rtx (fmode);
2122
2123  emit_move_insn (dst->exc_ptr_reg, src->exc_ptr_reg);
2124  emit_move_insn (dst->filter_reg, src->filter_reg);
2125
2126  return const0_rtx;
2127}
2128
/* Do any necessary initialization to access arbitrary stack frames.
   On the SPARC, this means flushing the register windows.  */

void
expand_builtin_unwind_init (void)
{
  /* Set this so all the registers get saved in our frame; we need to be
     able to copy the saved values for any registers from frames we unwind.  */
  crtl->saves_all_registers = 1;

  /* Let the target do any additional frame setup the unwinder needs.  */
#ifdef SETUP_FRAME_ADDRESSES
  SETUP_FRAME_ADDRESSES ();
#endif
}
2143
/* Map a non-negative number to an eh return data register number; expands
   to -1 if no return data register is associated with the input number.
   At least the inputs 0 and 1 must be mapped; the target may provide more.

   This expands __builtin_eh_return_regno; EXP's single argument must
   be an integer constant, otherwise an error is emitted and -1
   returned.  */

rtx
expand_builtin_eh_return_data_regno (tree exp)
{
  tree which = CALL_EXPR_ARG (exp, 0);
  unsigned HOST_WIDE_INT iwhich;

  if (TREE_CODE (which) != INTEGER_CST)
    {
      error ("argument of %<__builtin_eh_return_regno%> must be constant");
      return constm1_rtx;
    }

  iwhich = tree_to_uhwi (which);
  iwhich = EH_RETURN_DATA_REGNO (iwhich);
  if (iwhich == INVALID_REGNUM)
    return constm1_rtx;

  /* Translate the hard register number into the numbering the
     unwinder's tables use.  */
#ifdef DWARF_FRAME_REGNUM
  iwhich = DWARF_FRAME_REGNUM (iwhich);
#else
  iwhich = DBX_REGISTER_NUMBER (iwhich);
#endif

  return GEN_INT (iwhich);
}
2173
/* Given a value extracted from the return address register or stack slot,
   return the actual address encoded in that value.  ADDR_TREE is the
   raw value as a GENERIC expression.  */

rtx
expand_builtin_extract_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, Pmode, EXPAND_NORMAL);

  /* If the expression expanded in some other mode, bring the value
     into Pmode first.  */
  if (GET_MODE (addr) != Pmode
      && GET_MODE (addr) != VOIDmode)
    {
#ifdef POINTERS_EXTEND_UNSIGNED
      addr = convert_memory_address (Pmode, addr);
#else
      addr = convert_to_mode (Pmode, addr, 0);
#endif
    }

  /* First mask out any unwanted bits.  */
#ifdef MASK_RETURN_ADDR
  expand_and (Pmode, addr, MASK_RETURN_ADDR, addr);
#endif

  /* Then adjust to find the real return address.  */
#if defined (RETURN_ADDR_OFFSET)
  addr = plus_constant (Pmode, addr, RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2204
/* Given an actual address in addr_tree, do any necessary encoding
   and return the value to be stored in the return address register or
   stack slot so the epilogue will return to that address.  This is the
   inverse of expand_builtin_extract_return_addr.  */

rtx
expand_builtin_frob_return_addr (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);

  addr = convert_memory_address (Pmode, addr);

  /* Undo the offset that extract_return_addr applies.  */
#ifdef RETURN_ADDR_OFFSET
  addr = force_reg (Pmode, addr);
  addr = plus_constant (Pmode, addr, -RETURN_ADDR_OFFSET);
#endif

  return addr;
}
2223
/* Set up the epilogue with the magic bits we'll need to return to the
   exception handler.  STACKADJ_TREE is the stack adjustment (only used
   when the target defines EH_RETURN_STACKADJ_RTX); HANDLER_TREE is the
   handler address.  Both values are cached in pseudos in crtl->eh so
   expand_eh_return can consume them, and control is transferred to the
   shared ehr_label emitted there.  */

void
expand_builtin_eh_return (tree stackadj_tree ATTRIBUTE_UNUSED,
			  tree handler_tree)
{
  rtx tmp;

#ifdef EH_RETURN_STACKADJ_RTX
  tmp = expand_expr (stackadj_tree, crtl->eh.ehr_stackadj,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  /* Cache the value in a pseudo shared by all uses of the builtin
     in this function; avoid a redundant copy if expansion already
     targeted the cached register.  */
  if (!crtl->eh.ehr_stackadj)
    crtl->eh.ehr_stackadj = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_stackadj)
    emit_move_insn (crtl->eh.ehr_stackadj, tmp);
#endif

  tmp = expand_expr (handler_tree, crtl->eh.ehr_handler,
		     VOIDmode, EXPAND_NORMAL);
  tmp = convert_memory_address (Pmode, tmp);
  /* Likewise cache the handler address.  */
  if (!crtl->eh.ehr_handler)
    crtl->eh.ehr_handler = copy_addr_to_reg (tmp);
  else if (tmp != crtl->eh.ehr_handler)
    emit_move_insn (crtl->eh.ehr_handler, tmp);

  /* Jump to the label that expand_eh_return will emit at the end of
     the function.  */
  if (!crtl->eh.ehr_label)
    crtl->eh.ehr_label = gen_label_rtx ();
  emit_jump (crtl->eh.ehr_label);
}
2255
/* Expand __builtin_eh_return.  This exit path from the function loads up
   the eh return data registers, adjusts the stack, and branches to a
   given PC other than the normal return address.  Emitted once per
   function, at the point where this is called; does nothing unless
   expand_builtin_eh_return ran earlier and created ehr_label.  */

void
expand_eh_return (void)
{
  rtx_code_label *around_label;

  if (! crtl->eh.ehr_label)
    return;

  crtl->calls_eh_return = 1;

  /* On the normal (fall-through) path, the stack adjustment is zero.  */
#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, const0_rtx);
#endif

  /* Normal control flow skips over the eh-return sequence.  */
  around_label = gen_label_rtx ();
  emit_jump (around_label);

  emit_label (crtl->eh.ehr_label);
  clobber_return_register ();

#ifdef EH_RETURN_STACKADJ_RTX
  emit_move_insn (EH_RETURN_STACKADJ_RTX, crtl->eh.ehr_stackadj);
#endif

  /* Prefer the target's eh_return pattern; otherwise store the handler
     address into the target-designated location, if any.  */
#ifdef HAVE_eh_return
  if (HAVE_eh_return)
    emit_insn (gen_eh_return (crtl->eh.ehr_handler));
  else
#endif
    {
#ifdef EH_RETURN_HANDLER_RTX
      emit_move_insn (EH_RETURN_HANDLER_RTX, crtl->eh.ehr_handler);
#else
      error ("__builtin_eh_return not supported on this target");
#endif
    }

  emit_label (around_label);
}
2299
/* Convert a ptr_mode address ADDR_TREE to a Pmode address controlled by
   POINTERS_EXTEND_UNSIGNED and return it.  The result is widened to
   the target's unwind word mode.  */

rtx
expand_builtin_extend_pointer (tree addr_tree)
{
  rtx addr = expand_expr (addr_tree, NULL_RTX, ptr_mode, EXPAND_NORMAL);
  int extend;

#ifdef POINTERS_EXTEND_UNSIGNED
  extend = POINTERS_EXTEND_UNSIGNED;
#else
  /* The previous EH code did an unsigned extend by default, so we do this also
     for consistency.  */
  extend = 1;
#endif

  return convert_modes (targetm.unwind_word_mode (), ptr_mode, addr, extend);
}
2319
/* Find or create an action record with the given FILTER value and NEXT
   link in AR_HASH, emitting its encoding into
   crtl->eh.action_record_data when new.  Return the record's 1-based
   offset within that data.  */

static int
add_action_record (action_hash_type *ar_hash, int filter, int next)
{
  struct action_record **slot, *new_ar, tmp;

  tmp.filter = filter;
  tmp.next = next;
  slot = ar_hash->find_slot (&tmp, INSERT);

  if ((new_ar = *slot) == NULL)
    {
      new_ar = XNEW (struct action_record);
      new_ar->offset = crtl->eh.action_record_data->length () + 1;
      new_ar->filter = filter;
      new_ar->next = next;
      *slot = new_ar;

      /* The filter value goes in untouched.  The link to the next
	 record is a "self-relative" byte offset, or zero to indicate
	 that there is no next record.  So convert the absolute 1 based
	 indices we've been carrying around into a displacement.  */

      push_sleb128 (&crtl->eh.action_record_data, filter);
      if (next)
	next -= crtl->eh.action_record_data->length () + 1;
      push_sleb128 (&crtl->eh.action_record_data, next);
    }

  return new_ar->offset;
}
2350
/* Build the chain of action records implied by REGION and all its
   enclosing regions, adding records via add_action_record.  Return
   the 1-based offset of the first record of the chain, or one of the
   magic values: 0 for a cleanup-only chain, -1 for no action and no
   landing pad, -2 for a bare must-not-throw region.  (-3 is used only
   internally, to mean "outer search not yet performed".)  */

static int
collect_one_action_chain (action_hash_type *ar_hash, eh_region region)
{
  int next;

  /* If we've reached the top of the region chain, then we have
     no actions, and require no landing pad.  */
  if (region == NULL)
    return -1;

  switch (region->type)
    {
    case ERT_CLEANUP:
      {
	eh_region r;
	/* A cleanup adds a zero filter to the beginning of the chain, but
	   there are special cases to look out for.  If there are *only*
	   cleanups along a path, then it compresses to a zero action.
	   Further, if there are multiple cleanups along a path, we only
	   need to represent one of them, as that is enough to trigger
	   entry to the landing pad at runtime.  */
	next = collect_one_action_chain (ar_hash, region->outer);
	if (next <= 0)
	  return 0;
	for (r = region->outer; r ; r = r->outer)
	  if (r->type == ERT_CLEANUP)
	    return next;
	return add_action_record (ar_hash, 0, next);
      }

    case ERT_TRY:
      {
	eh_catch c;

	/* Process the associated catch regions in reverse order.
	   If there's a catch-all handler, then we don't need to
	   search outer regions.  Use a magic -3 value to record
	   that we haven't done the outer search.  */
	next = -3;
	for (c = region->u.eh_try.last_catch; c ; c = c->prev_catch)
	  {
	    if (c->type_list == NULL)
	      {
		/* Retrieve the filter from the head of the filter list
		   where we have stored it (see assign_filter_values).  */
		int filter = TREE_INT_CST_LOW (TREE_VALUE (c->filter_list));
		next = add_action_record (ar_hash, filter, 0);
	      }
	    else
	      {
		/* Once the outer search is done, trigger an action record for
		   each filter we have.  */
		tree flt_node;

		if (next == -3)
		  {
		    next = collect_one_action_chain (ar_hash, region->outer);

		    /* If there is no next action, terminate the chain.  */
		    if (next == -1)
		      next = 0;
		    /* If all outer actions are cleanups or must_not_throw,
		       we'll have no action record for it, since we had wanted
		       to encode these states in the call-site record directly.
		       Add a cleanup action to the chain to catch these.  */
		    else if (next <= 0)
		      next = add_action_record (ar_hash, 0, 0);
		  }

		flt_node = c->filter_list;
		for (; flt_node; flt_node = TREE_CHAIN (flt_node))
		  {
		    int filter = TREE_INT_CST_LOW (TREE_VALUE (flt_node));
		    next = add_action_record (ar_hash, filter, next);
		  }
	      }
	  }
	return next;
      }

    case ERT_ALLOWED_EXCEPTIONS:
      /* An exception specification adds its filter to the
	 beginning of the chain.  */
      next = collect_one_action_chain (ar_hash, region->outer);

      /* If there is no next action, terminate the chain.  */
      if (next == -1)
	next = 0;
      /* If all outer actions are cleanups or must_not_throw,
	 we'll have no action record for it, since we had wanted
	 to encode these states in the call-site record directly.
	 Add a cleanup action to the chain to catch these.  */
      else if (next <= 0)
	next = add_action_record (ar_hash, 0, 0);

      return add_action_record (ar_hash, region->u.allowed.filter, next);

    case ERT_MUST_NOT_THROW:
      /* A must-not-throw region with no inner handlers or cleanups
	 requires no call-site entry.  Note that this differs from
	 the no handler or cleanup case in that we do require an lsda
	 to be generated.  Return a magic -2 value to record this.  */
      return -2;
    }

  gcc_unreachable ();
}
2458
/* Append a call-site record with the given LANDING_PAD and ACTION to
   the table for text SECTION.  Return its index, biased by
   call_site_base.  */

static int
add_call_site (rtx landing_pad, int action, int section)
{
  call_site_record record;

  record = ggc_alloc<call_site_record_d> ();
  record->landing_pad = landing_pad;
  record->action = action;

  vec_safe_push (crtl->eh.call_site_record_v[section], record);

  return call_site_base + crtl->eh.call_site_record_v[section]->length () - 1;
}
2472
/* Emit a NOTE_INSN_EH_REGION_END note after INSN and return it,
   placing it past any immediately following CALL_ARG_LOCATION note so
   that pair stays together.  */

static rtx_note *
emit_note_eh_region_end (rtx_insn *insn)
{
  rtx_insn *next = NEXT_INSN (insn);

  /* Make sure we do not split a call and its corresponding
     CALL_ARG_LOCATION note.  */
  if (next && NOTE_P (next)
      && NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
    insn = next;

  return emit_note_after (NOTE_INSN_EH_REGION_END, insn);
}
2486
/* Turn REG_EH_REGION notes back into NOTE_INSN_EH_REGION notes.
   The new note numbers will not refer to region numbers, but
   instead to call site entries.

   Walks the insn chain once, tracking the current action chain and
   landing pad, emitting begin/end region notes whenever either
   changes, and building the per-section call-site record vectors.
   Hot/cold partitioning (NOTE_INSN_SWITCH_TEXT_SECTIONS) forces any
   open region to be closed before the switch and reopened after.  */

static unsigned int
convert_to_eh_region_ranges (void)
{
  rtx insn;
  rtx_insn *iter;
  rtx_note *note;
  action_hash_type ar_hash (31);
  /* -3 means "no action seen yet"; see collect_one_action_chain for
     the other magic values.  */
  int last_action = -3;
  rtx_insn *last_action_insn = NULL;
  rtx last_landing_pad = NULL_RTX;
  rtx_insn *first_no_action_insn = NULL;
  int call_site = 0;
  int cur_sec = 0;
  rtx section_switch_note = NULL_RTX;
  rtx_insn *first_no_action_insn_before_switch = NULL;
  rtx_insn *last_no_action_insn_before_switch = NULL;
  int saved_call_site_base = call_site_base;

  vec_alloc (crtl->eh.action_record_data, 64);

  for (iter = get_insns (); iter ; iter = NEXT_INSN (iter))
    if (INSN_P (iter))
      {
	eh_landing_pad lp;
	eh_region region;
	bool nothrow;
	int this_action;
	rtx this_landing_pad;

	insn = iter;
	/* Look at the real insn inside a delay-slot SEQUENCE.  */
	if (NONJUMP_INSN_P (insn)
	    && GET_CODE (PATTERN (insn)) == SEQUENCE)
	  insn = XVECEXP (PATTERN (insn), 0, 0);

	nothrow = get_eh_region_and_lp_from_rtx (insn, &region, &lp);
	if (nothrow)
	  continue;
	if (region)
	  this_action = collect_one_action_chain (&ar_hash, region);
	else
	  this_action = -1;

	/* Existence of catch handlers, or must-not-throw regions
	   implies that an lsda is needed (even if empty).  */
	if (this_action != -1)
	  crtl->uses_eh_lsda = 1;

	/* Delay creation of region notes for no-action regions
	   until we're sure that an lsda will be required.  */
	else if (last_action == -3)
	  {
	    first_no_action_insn = iter;
	    last_action = -1;
	  }

	if (this_action >= 0)
	  this_landing_pad = lp->landing_pad;
	else
	  this_landing_pad = NULL_RTX;

	/* Differing actions or landing pads implies a change in call-site
	   info, which implies some EH_REGION note should be emitted.  */
	if (last_action != this_action
	    || last_landing_pad != this_landing_pad)
	  {
	    /* If there is a queued no-action region in the other section
	       with hot/cold partitioning, emit it now.  */
	    if (first_no_action_insn_before_switch)
	      {
		gcc_assert (this_action != -1
			    && last_action == (first_no_action_insn
					       ? -1 : -3));
		call_site = add_call_site (NULL_RTX, 0, 0);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					 first_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		note
		  = emit_note_eh_region_end (last_no_action_insn_before_switch);
		NOTE_EH_HANDLER (note) = call_site;
		gcc_assert (last_action != -3
			    || (last_action_insn
				== last_no_action_insn_before_switch));
		first_no_action_insn_before_switch = NULL;
		last_no_action_insn_before_switch = NULL;
		call_site_base++;
	      }
	    /* If we'd not seen a previous action (-3) or the previous
	       action was must-not-throw (-2), then we do not need an
	       end note.  */
	    if (last_action >= -1)
	      {
		/* If we delayed the creation of the begin, do it now.  */
		if (first_no_action_insn)
		  {
		    call_site = add_call_site (NULL_RTX, 0, cur_sec);
		    note = emit_note_before (NOTE_INSN_EH_REGION_BEG,
					     first_no_action_insn);
		    NOTE_EH_HANDLER (note) = call_site;
		    first_no_action_insn = NULL;
		  }

		note = emit_note_eh_region_end (last_action_insn);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    /* If the new action is must-not-throw, then no region notes
	       are created.  */
	    if (this_action >= -1)
	      {
		call_site = add_call_site (this_landing_pad,
					   this_action < 0 ? 0 : this_action,
					   cur_sec);
		note = emit_note_before (NOTE_INSN_EH_REGION_BEG, iter);
		NOTE_EH_HANDLER (note) = call_site;
	      }

	    last_action = this_action;
	    last_landing_pad = this_landing_pad;
	  }
	last_action_insn = iter;
      }
    else if (NOTE_P (iter)
	     && NOTE_KIND (iter) == NOTE_INSN_SWITCH_TEXT_SECTIONS)
      {
	gcc_assert (section_switch_note == NULL_RTX);
	gcc_assert (flag_reorder_blocks_and_partition);
	section_switch_note = iter;
	/* Queue a pending no-action region to be emitted once we know
	   whether an lsda is required (see the -1 case above).  */
	if (first_no_action_insn)
	  {
	    first_no_action_insn_before_switch = first_no_action_insn;
	    last_no_action_insn_before_switch = last_action_insn;
	    first_no_action_insn = NULL;
	    gcc_assert (last_action == -1);
	    last_action = -3;
	  }
	/* Force closing of current EH region before section switch and
	   opening a new one afterwards.  */
	else if (last_action != -3)
	  last_landing_pad = pc_rtx;
	if (crtl->eh.call_site_record_v[cur_sec])
	  call_site_base += crtl->eh.call_site_record_v[cur_sec]->length ();
	cur_sec++;
	gcc_assert (crtl->eh.call_site_record_v[cur_sec] == NULL);
	vec_alloc (crtl->eh.call_site_record_v[cur_sec], 10);
      }

  /* Close the final open region, if any.  */
  if (last_action >= -1 && ! first_no_action_insn)
    {
      note = emit_note_eh_region_end (last_action_insn);
      NOTE_EH_HANDLER (note) = call_site;
    }

  call_site_base = saved_call_site_base;

  return 0;
}
2647
2648namespace {
2649
/* Pass metadata for the "eh_ranges" RTL pass, which runs
   convert_to_eh_region_ranges; see struct pass_data for field
   meanings.  */
const pass_data pass_data_convert_to_eh_region_ranges =
{
  RTL_PASS, /* type */
  "eh_ranges", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
2662
/* Pass object wrapping convert_to_eh_region_ranges as an RTL pass;
   gated off for SJLJ exceptions or functions without EH regions.  */

class pass_convert_to_eh_region_ranges : public rtl_opt_pass
{
public:
  pass_convert_to_eh_region_ranges (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_convert_to_eh_region_ranges, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return convert_to_eh_region_ranges ();
    }

}; // class pass_convert_to_eh_region_ranges
2678
2679bool
2680pass_convert_to_eh_region_ranges::gate (function *)
2681{
2682  /* Nothing to do for SJLJ exceptions or if no regions created.  */
2683  if (cfun->eh->region_tree == NULL)
2684    return false;
2685  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
2686    return false;
2687  return true;
2688}
2689
2690} // anon namespace
2691
/* Factory function returning a new instance of the eh_ranges pass.  */

rtl_opt_pass *
make_pass_convert_to_eh_region_ranges (gcc::context *ctxt)
{
  return new pass_convert_to_eh_region_ranges (ctxt);
}
2697
2698static void
2699push_uleb128 (vec<uchar, va_gc> **data_area, unsigned int value)
2700{
2701  do
2702    {
2703      unsigned char byte = value & 0x7f;
2704      value >>= 7;
2705      if (value)
2706	byte |= 0x80;
2707      vec_safe_push (*data_area, byte);
2708    }
2709  while (value);
2710}
2711
2712static void
2713push_sleb128 (vec<uchar, va_gc> **data_area, int value)
2714{
2715  unsigned char byte;
2716  int more;
2717
2718  do
2719    {
2720      byte = value & 0x7f;
2721      value >>= 7;
2722      more = ! ((value == 0 && (byte & 0x40) == 0)
2723		|| (value == -1 && (byte & 0x40) != 0));
2724      if (more)
2725	byte |= 0x80;
2726      vec_safe_push (*data_area, byte);
2727    }
2728  while (more);
2729}
2730
2731
2732#ifndef HAVE_AS_LEB128
/* Return the size in bytes of the DWARF2 call-site table for text
   SECTION, assuming 4-byte encodings for the region start, length and
   landing-pad fields plus a uleb128 action per record.  Only needed
   when the assembler cannot compute leb128 deltas itself.  */

static int
dw2_size_of_call_site_table (int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int size = n * (4 + 4 + 4);
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	(*crtl->eh.call_site_record_v[section])[i];
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
2749
/* Return the size in bytes of the SJLJ call-site table (always in
   section 0): per record, a uleb128 landing-pad index and a uleb128
   action.  */

static int
sjlj_size_of_call_site_table (void)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
  int size = 0;
  int i;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs =
	(*crtl->eh.call_site_record_v[0])[i];
      size += size_of_uleb128 (INTVAL (cs->landing_pad));
      size += size_of_uleb128 (cs->action);
    }

  return size;
}
2767#endif
2768
/* Output the DWARF2 call-site table for text SECTION, encoding the
   offsets with CS_FORMAT (DW_EH_PE_uleb128 or 4-byte data).  Region
   start/end offsets are emitted relative to the label beginning the
   section's code.  */

static void
dw2_output_call_site_table (int cs_format, int section)
{
  int n = vec_safe_length (crtl->eh.call_site_record_v[section]);
  int i;
  const char *begin;

  /* Select the label the offsets are relative to.  */
  if (section == 0)
    begin = current_function_func_begin_label;
  else if (first_function_block_is_cold)
    begin = crtl->subsections.hot_section_label;
  else
    begin = crtl->subsections.cold_section_label;

  for (i = 0; i < n; ++i)
    {
      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[section])[i];
      char reg_start_lab[32];
      char reg_end_lab[32];
      char landing_pad_lab[32];

      ASM_GENERATE_INTERNAL_LABEL (reg_start_lab, "LEHB", call_site_base + i);
      ASM_GENERATE_INTERNAL_LABEL (reg_end_lab, "LEHE", call_site_base + i);

      if (cs->landing_pad)
	ASM_GENERATE_INTERNAL_LABEL (landing_pad_lab, "L",
				     CODE_LABEL_NUMBER (cs->landing_pad));

      /* ??? Perhaps use insn length scaling if the assembler supports
	 generic arithmetic.  */
      /* ??? Perhaps use attr_length to choose data1 or data2 instead of
	 data4 if the function is small enough.  */
      if (cs_format == DW_EH_PE_uleb128)
	{
	  dw2_asm_output_delta_uleb128 (reg_start_lab, begin,
					"region %d start", i);
	  dw2_asm_output_delta_uleb128 (reg_end_lab, reg_start_lab,
					"length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta_uleb128 (landing_pad_lab, begin,
					  "landing pad");
	  else
	    dw2_asm_output_data_uleb128 (0, "landing pad");
	}
      else
	{
	  dw2_asm_output_delta (4, reg_start_lab, begin,
				"region %d start", i);
	  dw2_asm_output_delta (4, reg_end_lab, reg_start_lab, "length");
	  if (cs->landing_pad)
	    dw2_asm_output_delta (4, landing_pad_lab, begin,
				  "landing pad");
	  else
	    dw2_asm_output_data (4, 0, "landing pad");
	}
      dw2_asm_output_data_uleb128 (cs->action, "action");
    }

  call_site_base += n;
}
2829
2830static void
2831sjlj_output_call_site_table (void)
2832{
2833  int n = vec_safe_length (crtl->eh.call_site_record_v[0]);
2834  int i;
2835
2836  for (i = 0; i < n; ++i)
2837    {
2838      struct call_site_record_d *cs = (*crtl->eh.call_site_record_v[0])[i];
2839
2840      dw2_asm_output_data_uleb128 (INTVAL (cs->landing_pad),
2841				   "region %d landing pad", i);
2842      dw2_asm_output_data_uleb128 (cs->action, "action");
2843    }
2844
2845  call_site_base += n;
2846}
2847
/* Switch to the section that should be used for exception tables.
   FNNAME is the current function's assembler name; it is only used
   when per-function .gcc_except_table sections are emitted.  The
   chosen section is cached in exception_section unless it depends on
   the function name.  */

static void
switch_to_exception_section (const char * ARG_UNUSED (fnname))
{
  section *s;

  if (exception_section)
    s = exception_section;
  else
    {
      /* Compute the section and cache it into exception_section,
	 unless it depends on the function name.  */
      if (targetm_common.have_named_sections)
	{
	  int flags;

	  if (EH_TABLES_CAN_BE_READ_ONLY)
	    {
	      /* The table can only be read-only if the type-table
		 encoding does not need runtime relocations under PIC.  */
	      int tt_format =
		ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
	      flags = ((! flag_pic
			|| ((tt_format & 0x70) != DW_EH_PE_absptr
			    && (tt_format & 0x70) != DW_EH_PE_aligned))
		       ? 0 : SECTION_WRITE);
	    }
	  else
	    flags = SECTION_WRITE;

#ifdef HAVE_LD_EH_GC_SECTIONS
	  if (flag_function_sections
	      || (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP))
	    {
	      char *section_name = XNEWVEC (char, strlen (fnname) + 32);
	      /* The EH table must match the code section, so only mark
		 it linkonce if we have COMDAT groups to tie them together.  */
	      if (DECL_COMDAT_GROUP (current_function_decl) && HAVE_COMDAT_GROUP)
		flags |= SECTION_LINKONCE;
	      sprintf (section_name, ".gcc_except_table.%s", fnname);
	      s = get_section (section_name, flags, current_function_decl);
	      free (section_name);
	    }
	  else
#endif
	    exception_section
	      = s = get_section (".gcc_except_table", flags, NULL);
	}
      else
	exception_section
	  = s = flag_pic ? data_section : readonly_data_section;
    }

  switch_to_section (s);
}
2902
2903
/* Output a reference from an exception table to the type_info object TYPE.
   TT_FORMAT and TT_FORMAT_SIZE describe the DWARF encoding method used for
   the value.  A NULL_TREE type emits a zero (catch-all) entry.  */

static void
output_ttype (tree type, int tt_format, int tt_format_size)
{
  rtx value;
  bool is_public = true;

  if (type == NULL_TREE)
    value = const0_rtx;
  else
    {
      /* FIXME lto.  pass_ipa_free_lang_data changes all types to
	 runtime types so TYPE should already be a runtime type
	 reference.  When pass_ipa_free_lang data is made a default
	 pass, we can then remove the call to lookup_type_for_runtime
	 below.  */
      if (TYPE_P (type))
	type = lookup_type_for_runtime (type);

      value = expand_expr (type, NULL_RTX, VOIDmode, EXPAND_INITIALIZER);

      /* Let cgraph know that the rtti decl is used.  Not all of the
	 paths below go through assemble_integer, which would take
	 care of this for us.  */
      STRIP_NOPS (type);
      if (TREE_CODE (type) == ADDR_EXPR)
	{
	  type = TREE_OPERAND (type, 0);
	  if (TREE_CODE (type) == VAR_DECL)
	    is_public = TREE_PUBLIC (type);
	}
      else
	gcc_assert (TREE_CODE (type) == INTEGER_CST);
    }

  /* Allow the target to override the type table entry format.  */
  if (targetm.asm_out.ttype (value))
    return;

  /* Absolute/aligned pointers can be emitted directly; other
     encodings go through the dwarf2 output machinery.  */
  if (tt_format == DW_EH_PE_absptr || tt_format == DW_EH_PE_aligned)
    assemble_integer (value, tt_format_size,
		      tt_format_size * BITS_PER_UNIT, 1);
  else
    dw2_asm_output_encoded_addr_rtx (tt_format, value, is_public, NULL);
}
2952
2953static void
2954output_one_function_exception_table (int section)
2955{
2956  int tt_format, cs_format, lp_format, i;
2957#ifdef HAVE_AS_LEB128
2958  char ttype_label[32];
2959  char cs_after_size_label[32];
2960  char cs_end_label[32];
2961#else
2962  int call_site_len;
2963#endif
2964  int have_tt_data;
2965  int tt_format_size = 0;
2966
2967  have_tt_data = (vec_safe_length (cfun->eh->ttype_data)
2968		  || (targetm.arm_eabi_unwinder
2969		      ? vec_safe_length (cfun->eh->ehspec_data.arm_eabi)
2970		      : vec_safe_length (cfun->eh->ehspec_data.other)));
2971
2972  /* Indicate the format of the @TType entries.  */
2973  if (! have_tt_data)
2974    tt_format = DW_EH_PE_omit;
2975  else
2976    {
2977      tt_format = ASM_PREFERRED_EH_DATA_FORMAT (/*code=*/0, /*global=*/1);
2978#ifdef HAVE_AS_LEB128
2979      ASM_GENERATE_INTERNAL_LABEL (ttype_label,
2980				   section ? "LLSDATTC" : "LLSDATT",
2981				   current_function_funcdef_no);
2982#endif
2983      tt_format_size = size_of_encoded_value (tt_format);
2984
2985      assemble_align (tt_format_size * BITS_PER_UNIT);
2986    }
2987
2988  targetm.asm_out.internal_label (asm_out_file, section ? "LLSDAC" : "LLSDA",
2989				  current_function_funcdef_no);
2990
2991  /* The LSDA header.  */
2992
2993  /* Indicate the format of the landing pad start pointer.  An omitted
2994     field implies @LPStart == @Start.  */
2995  /* Currently we always put @LPStart == @Start.  This field would
2996     be most useful in moving the landing pads completely out of
2997     line to another section, but it could also be used to minimize
2998     the size of uleb128 landing pad offsets.  */
2999  lp_format = DW_EH_PE_omit;
3000  dw2_asm_output_data (1, lp_format, "@LPStart format (%s)",
3001		       eh_data_format_name (lp_format));
3002
3003  /* @LPStart pointer would go here.  */
3004
3005  dw2_asm_output_data (1, tt_format, "@TType format (%s)",
3006		       eh_data_format_name (tt_format));
3007
3008#ifndef HAVE_AS_LEB128
3009  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3010    call_site_len = sjlj_size_of_call_site_table ();
3011  else
3012    call_site_len = dw2_size_of_call_site_table (section);
3013#endif
3014
3015  /* A pc-relative 4-byte displacement to the @TType data.  */
3016  if (have_tt_data)
3017    {
3018#ifdef HAVE_AS_LEB128
3019      char ttype_after_disp_label[32];
3020      ASM_GENERATE_INTERNAL_LABEL (ttype_after_disp_label,
3021				   section ? "LLSDATTDC" : "LLSDATTD",
3022				   current_function_funcdef_no);
3023      dw2_asm_output_delta_uleb128 (ttype_label, ttype_after_disp_label,
3024				    "@TType base offset");
3025      ASM_OUTPUT_LABEL (asm_out_file, ttype_after_disp_label);
3026#else
3027      /* Ug.  Alignment queers things.  */
3028      unsigned int before_disp, after_disp, last_disp, disp;
3029
3030      before_disp = 1 + 1;
3031      after_disp = (1 + size_of_uleb128 (call_site_len)
3032		    + call_site_len
3033		    + vec_safe_length (crtl->eh.action_record_data)
3034		    + (vec_safe_length (cfun->eh->ttype_data)
3035		       * tt_format_size));
3036
3037      disp = after_disp;
3038      do
3039	{
3040	  unsigned int disp_size, pad;
3041
3042	  last_disp = disp;
3043	  disp_size = size_of_uleb128 (disp);
3044	  pad = before_disp + disp_size + after_disp;
3045	  if (pad % tt_format_size)
3046	    pad = tt_format_size - (pad % tt_format_size);
3047	  else
3048	    pad = 0;
3049	  disp = after_disp + pad;
3050	}
3051      while (disp != last_disp);
3052
3053      dw2_asm_output_data_uleb128 (disp, "@TType base offset");
3054#endif
3055    }
3056
3057  /* Indicate the format of the call-site offsets.  */
3058#ifdef HAVE_AS_LEB128
3059  cs_format = DW_EH_PE_uleb128;
3060#else
3061  cs_format = DW_EH_PE_udata4;
3062#endif
3063  dw2_asm_output_data (1, cs_format, "call-site format (%s)",
3064		       eh_data_format_name (cs_format));
3065
3066#ifdef HAVE_AS_LEB128
3067  ASM_GENERATE_INTERNAL_LABEL (cs_after_size_label,
3068			       section ? "LLSDACSBC" : "LLSDACSB",
3069			       current_function_funcdef_no);
3070  ASM_GENERATE_INTERNAL_LABEL (cs_end_label,
3071			       section ? "LLSDACSEC" : "LLSDACSE",
3072			       current_function_funcdef_no);
3073  dw2_asm_output_delta_uleb128 (cs_end_label, cs_after_size_label,
3074				"Call-site table length");
3075  ASM_OUTPUT_LABEL (asm_out_file, cs_after_size_label);
3076  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3077    sjlj_output_call_site_table ();
3078  else
3079    dw2_output_call_site_table (cs_format, section);
3080  ASM_OUTPUT_LABEL (asm_out_file, cs_end_label);
3081#else
3082  dw2_asm_output_data_uleb128 (call_site_len, "Call-site table length");
3083  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
3084    sjlj_output_call_site_table ();
3085  else
3086    dw2_output_call_site_table (cs_format, section);
3087#endif
3088
3089  /* ??? Decode and interpret the data for flag_debug_asm.  */
3090  {
3091    uchar uc;
3092    FOR_EACH_VEC_ELT (*crtl->eh.action_record_data, i, uc)
3093      dw2_asm_output_data (1, uc, i ? NULL : "Action record table");
3094  }
3095
3096  if (have_tt_data)
3097    assemble_align (tt_format_size * BITS_PER_UNIT);
3098
3099  i = vec_safe_length (cfun->eh->ttype_data);
3100  while (i-- > 0)
3101    {
3102      tree type = (*cfun->eh->ttype_data)[i];
3103      output_ttype (type, tt_format, tt_format_size);
3104    }
3105
3106#ifdef HAVE_AS_LEB128
3107  if (have_tt_data)
3108      ASM_OUTPUT_LABEL (asm_out_file, ttype_label);
3109#endif
3110
3111  /* ??? Decode and interpret the data for flag_debug_asm.  */
3112  if (targetm.arm_eabi_unwinder)
3113    {
3114      tree type;
3115      for (i = 0;
3116	   vec_safe_iterate (cfun->eh->ehspec_data.arm_eabi, i, &type); ++i)
3117	output_ttype (type, tt_format, tt_format_size);
3118    }
3119  else
3120    {
3121      uchar uc;
3122      for (i = 0;
3123	   vec_safe_iterate (cfun->eh->ehspec_data.other, i, &uc); ++i)
3124	dw2_asm_output_data (1, uc,
3125			     i ? NULL : "Exception specification table");
3126    }
3127}
3128
3129void
3130output_function_exception_table (const char *fnname)
3131{
3132  rtx personality = get_personality_function (current_function_decl);
3133
3134  /* Not all functions need anything.  */
3135  if (! crtl->uses_eh_lsda)
3136    return;
3137
3138  if (personality)
3139    {
3140      assemble_external_libcall (personality);
3141
3142      if (targetm.asm_out.emit_except_personality)
3143	targetm.asm_out.emit_except_personality (personality);
3144    }
3145
3146  switch_to_exception_section (fnname);
3147
3148  /* If the target wants a label to begin the table, emit it here.  */
3149  targetm.asm_out.emit_except_table_label (asm_out_file);
3150
3151  output_one_function_exception_table (0);
3152  if (crtl->eh.call_site_record_v[1])
3153    output_one_function_exception_table (1);
3154
3155  switch_to_section (current_function_section ());
3156}
3157
/* Record TABLE as FUN's map from throwing statements to EH region
   numbers (CFUN->EH->THROW_STMT_TABLE).  Any previous table is simply
   overwritten; ownership of TABLE passes to FUN.  */

void
set_eh_throw_stmt_table (function *fun, hash_map<gimple, int> *table)
{
  fun->eh->throw_stmt_table = table;
}
3163
/* Return FUN's map from throwing statements to EH region numbers, as
   recorded by set_eh_throw_stmt_table; null if none has been set.  */

hash_map<gimple, int> *
get_eh_throw_stmt_table (struct function *fun)
{
  return fun->eh->throw_stmt_table;
}
3169
3170/* Determine if the function needs an EH personality function.  */
3171
3172enum eh_personality_kind
3173function_needs_eh_personality (struct function *fn)
3174{
3175  enum eh_personality_kind kind = eh_personality_none;
3176  eh_region i;
3177
3178  FOR_ALL_EH_REGION_FN (i, fn)
3179    {
3180      switch (i->type)
3181	{
3182	case ERT_CLEANUP:
3183	  /* Can do with any personality including the generic C one.  */
3184	  kind = eh_personality_any;
3185	  break;
3186
3187	case ERT_TRY:
3188	case ERT_ALLOWED_EXCEPTIONS:
3189	  /* Always needs a EH personality function.  The generic C
3190	     personality doesn't handle these even for empty type lists.  */
3191	  return eh_personality_lang;
3192
3193	case ERT_MUST_NOT_THROW:
3194	  /* Always needs a EH personality function.  The language may specify
3195	     what abort routine that must be used, e.g. std::terminate.  */
3196	  return eh_personality_lang;
3197	}
3198    }
3199
3200  return kind;
3201}
3202
/* Dump EH information to OUT.  Prints one line per region of FUN's
   EH region tree, indented two columns per nesting level, including
   each region's landing pads and type-specific data.  */

void
dump_eh_tree (FILE * out, struct function *fun)
{
  eh_region i;
  int depth = 0;
  /* Printable names, indexed by enum eh_region_type.  */
  static const char *const type_name[] = {
    "cleanup", "try", "allowed_exceptions", "must_not_throw"
  };

  i = fun->eh->region_tree;
  if (!i)
    return;

  fprintf (out, "Eh tree:\n");
  /* Depth-first walk over the region tree.  */
  while (1)
    {
      fprintf (out, "  %*s %i %s", depth * 2, "",
	       i->index, type_name[(int) i->type]);

      if (i->landing_pads)
	{
	  eh_landing_pad lp;

	  fprintf (out, " land:");
	  if (current_ir_type () == IR_GIMPLE)
	    {
	      /* Before expansion, a landing pad is identified by the
		 tree label in POST_LANDING_PAD.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  print_generic_expr (out, lp->post_landing_pad, 0);
		  fputc ('}', out);
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	  else
	    {
	      /* After expansion, print the insn UIDs of the landing pad
		 and post-landing-pad labels.  A label that is now a
		 NOTE is flagged "(del)" -- presumably deleted.  */
	      for (lp = i->landing_pads; lp ; lp = lp->next_lp)
		{
		  fprintf (out, "{%i,", lp->index);
		  if (lp->landing_pad)
		    fprintf (out, "%i%s,", INSN_UID (lp->landing_pad),
			     NOTE_P (lp->landing_pad) ? "(del)" : "");
		  else
		    fprintf (out, "(nil),");
		  if (lp->post_landing_pad)
		    {
		      rtx lab = label_rtx (lp->post_landing_pad);
		      fprintf (out, "%i%s}", INSN_UID (lab),
			       NOTE_P (lab) ? "(del)" : "");
		    }
		  else
		    fprintf (out, "(nil)}");
		  if (lp->next_lp)
		    fputc (',', out);
		}
	    }
	}

      /* Dump the data specific to each region type.  */
      switch (i->type)
	{
	case ERT_CLEANUP:
	case ERT_MUST_NOT_THROW:
	  break;

	case ERT_TRY:
	  {
	    /* A try region lists its catch handlers: label (if any)
	       and caught type list for each.  */
	    eh_catch c;
	    fprintf (out, " catch:");
	    for (c = i->u.eh_try.first_catch; c; c = c->next_catch)
	      {
		fputc ('{', out);
		if (c->label)
		  {
		    fprintf (out, "lab:");
		    print_generic_expr (out, c->label, 0);
		    fputc (';', out);
		  }
		print_generic_expr (out, c->type_list, 0);
		fputc ('}', out);
		if (c->next_catch)
		  fputc (',', out);
	      }
	  }
	  break;

	case ERT_ALLOWED_EXCEPTIONS:
	  /* An exception-specification region has a filter value and
	     the list of permitted types.  */
	  fprintf (out, " filter :%i types:", i->u.allowed.filter);
	  print_generic_expr (out, i->u.allowed.type_list, 0);
	  break;
	}
      fputc ('\n', out);

      /* If there are sub-regions, process them.  */
      if (i->inner)
	i = i->inner, depth++;
      /* If there are peers, process them.  */
      else if (i->next_peer)
	i = i->next_peer;
      /* Otherwise, step back up the tree to the next peer.  */
      else
	{
	  do
	    {
	      i = i->outer;
	      depth--;
	      if (i == NULL)
		return;
	    }
	  while (i->next_peer == NULL);
	  i = i->next_peer;
	}
    }
}
3319
/* Dump the EH tree for FN on stderr.  Convenience wrapper around
   dump_eh_tree, intended for use from a debugger.  */

DEBUG_FUNCTION void
debug_eh_tree (struct function *fn)
{
  dump_eh_tree (stderr, fn);
}
3327
/* Verify invariants on EH datastructures.  Checks that FUN's flat
   region_array and lp_array agree with its region_tree; on any
   failure, dumps the tree and aborts via internal_error.  */

DEBUG_FUNCTION void
verify_eh_tree (struct function *fun)
{
  eh_region r, outer;
  int nvisited_lp, nvisited_r;
  int count_lp, count_r, depth, i;
  eh_landing_pad lp;
  bool err = false;

  /* A function without EH regions has nothing to verify.  */
  if (!fun->eh->region_tree)
    return;

  /* Pass 1: each live region_array entry must be stored in the slot
     matching its own index.  Count the live entries; slot 0 is
     skipped (indices start at 1).  */
  count_r = 0;
  for (i = 1; vec_safe_iterate (fun->eh->region_array, i, &r); ++i)
    if (r)
      {
	if (r->index == i)
	  count_r++;
	else
	  {
	    error ("region_array is corrupted for region %i", r->index);
	    err = true;
	  }
      }

  /* Pass 2: same check for the landing-pad array.  */
  count_lp = 0;
  for (i = 1; vec_safe_iterate (fun->eh->lp_array, i, &lp); ++i)
    if (lp)
      {
	if (lp->index == i)
	  count_lp++;
	else
	  {
	    error ("lp_array is corrupted for lp %i", lp->index);
	    err = true;
	  }
      }

  /* Pass 3: walk the region tree depth-first, checking that every
     region sits in its array slot, records the correct outer region,
     and nests at a sane depth, and that every landing pad sits in its
     array slot and points back at its region.  Count what we visit.  */
  depth = nvisited_lp = nvisited_r = 0;
  outer = NULL;
  r = fun->eh->region_tree;
  while (1)
    {
      if ((*fun->eh->region_array)[r->index] != r)
	{
	  error ("region_array is corrupted for region %i", r->index);
	  err = true;
	}
      if (r->outer != outer)
	{
	  error ("outer block of region %i is wrong", r->index);
	  err = true;
	}
      if (depth < 0)
	{
	  error ("negative nesting depth of region %i", r->index);
	  err = true;
	}
      nvisited_r++;

      for (lp = r->landing_pads; lp ; lp = lp->next_lp)
	{
	  if ((*fun->eh->lp_array)[lp->index] != lp)
	    {
	      error ("lp_array is corrupted for lp %i", lp->index);
	      err = true;
	    }
	  if (lp->region != r)
	    {
	      error ("region of lp %i is wrong", lp->index);
	      err = true;
	    }
	  nvisited_lp++;
	}

      /* Advance: children first, then siblings; otherwise pop back up
	 until a peer is found or we fall off the root.  */
      if (r->inner)
	outer = r, r = r->inner, depth++;
      else if (r->next_peer)
	r = r->next_peer;
      else
	{
	  do
	    {
	      r = r->outer;
	      if (r == NULL)
		goto region_done;
	      depth--;
	      outer = r->outer;
	    }
	  while (r->next_peer == NULL);
	  r = r->next_peer;
	}
    }
 region_done:
  /* Cross-check the tree walk against the flat-array counts.  */
  if (depth != 0)
    {
      error ("tree list ends on depth %i", depth);
      err = true;
    }
  if (count_r != nvisited_r)
    {
      error ("region_array does not match region_tree");
      err = true;
    }
  if (count_lp != nvisited_lp)
    {
      error ("lp_array does not match region_tree");
      err = true;
    }

  if (err)
    {
      dump_eh_tree (stderr, fun);
      internal_error ("verify_eh_tree failed");
    }
}
3446
3447#include "gt-except.h"
3448