/* Definitions for computing resource usage of specific insns.
   Copyright (C) 1999-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "hash-set.h"
#include "vec.h"
#include "machmode.h"
#include "input.h"
#include "function.h"
#include "regs.h"
#include "flags.h"
#include "output.h"
#include "dominance.h"
#include "cfg.h"
#include "predict.h"
#include "basic-block.h"
#include "resource.h"
#include "except.h"
#include "insn-attr.h"
#include "params.h"
#include "df.h"

/* This structure is used to record liveness information at the targets or
   fallthrough insns of branches.  We will most likely need the information
   at targets again, so we save it in a hash table rather than recomputing
   it each time.  */

struct target_info
{
  int uid;			/* INSN_UID of target.  */
  struct target_info *next;	/* Next info for same hash bucket.  */
  HARD_REG_SET live_regs;	/* Registers live at target.  */
  int block;			/* Basic block number containing target.  */
  int bb_tick;			/* Generation count of basic block info.  */
};

#define TARGET_HASH_PRIME 257

/* Indicates what resources are required at the beginning of the epilogue.  */
static struct resources start_of_epilogue_needs;

/* Indicates what resources are required at function end.  */
static struct resources end_of_function_needs;

/* Define the hash table itself.  */
static struct target_info **target_hash_table = NULL;

/* For each basic block, we maintain a generation number of its basic
   block info, which is updated each time we move an insn from the
   target of a jump.  This is the generation number indexed by block
   number.  */

static int *bb_ticks;

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Also used by update_live_status.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

static void update_live_status (rtx, const_rtx, void *);
static int find_basic_block (rtx_insn *, int);
static rtx_insn *next_insn_no_annul (rtx_insn *);
static rtx_insn *find_dead_or_set_registers (rtx_insn *, struct resources*,
					     rtx *, int, struct resources,
					     struct resources);

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (rtx dest, const_rtx x, void *data ATTRIBUTE_UNUSED)
{
  int first_regno, last_regno;
  int i;

  if (!REG_P (dest)
      && (GET_CODE (dest) != SUBREG || !REG_P (SUBREG_REG (dest))))
    return;

  if (GET_CODE (dest) == SUBREG)
    {
      first_regno = subreg_regno (dest);
      last_regno = first_regno + subreg_nregs (dest);
    }
  else
    {
      first_regno = REGNO (dest);
      last_regno = END_HARD_REGNO (dest);
    }

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Find the number of the basic block with correct live register
   information that starts closest to INSN.  Return -1 if we couldn't
   find such a basic block or the beginning is more than
   SEARCH_LIMIT instructions before INSN.  Use SEARCH_LIMIT = -1 for
   an unlimited search.

   The delay slot filling code destroys the control-flow graph so,
   instead of finding the basic block containing INSN, we search
   backwards toward a BARRIER where the live register information is
   correct.  */
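
/* For example, in an insn stream that looks roughly like

       barrier
       code_label 42	<- starts a basic block, say block N
       insn ...
       insn INSN

   scanning backwards from INSN stops at the barrier, and the label
   following it yields block N.  (Illustrative sketch only; the label
   uid and block number are made up.)  */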

static int
find_basic_block (rtx_insn *insn, int search_limit)
{
  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && !BARRIER_P (insn) && search_limit != 0;
       insn = prev_nonnote_insn (insn), --search_limit)
    ;

  /* The closest BARRIER is too far away.  */
  if (search_limit == 0)
    return -1;

  /* The start of the function.  */
  else if (insn == 0)
    return ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && LABEL_P (insn);
       insn = next_nonnote_insn (insn))
    if (BLOCK_FOR_INSN (insn))
      return BLOCK_FOR_INSN (insn)->index;

  return -1;
}

/* Similar to next_insn, but ignores insns in the delay slots of
   an annulled branch.  */
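
/* E.g. given an annulled branch J whose delay slot insn I was filled
   from the branch target (so INSN_FROM_TARGET_P (I) is set),

       jump_insn J (annulled) -> insn I (from target) -> insn K

   next_insn_no_annul (J) skips I and returns K, since I only executes
   when the branch is taken.  (Illustrative sketch only.)  */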

static rtx_insn *
next_insn_no_annul (rtx_insn *insn)
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch.  */
      if (JUMP_P (insn)
	  && INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  while ((NONJUMP_INSN_P (next) || JUMP_P (next) || CALL_P (next))
		 && INSN_FROM_TARGET_P (next))
	    {
	      insn = next;
	      next = NEXT_INSN (insn);
	    }
	}

      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Given X, some rtl, and RES, a pointer to a `struct resources', mark
   which resources are referenced by the insn.  If INCLUDE_DELAYED_EFFECTS
   is TRUE, resources used by the called routine will be included for
   CALL_INSNs.  */
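
/* A typical call looks like this (illustrative sketch only):

       struct resources needed;
       CLEAR_RESOURCE (&needed);
       mark_referenced_resources (insn, &needed, true);

   after which NEEDED describes the register, memory, and CC uses of
   INSN, including the implicit uses of a CALL_INSN.  */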

void
mark_referenced_resources (rtx x, struct resources *res,
			   bool include_delayed_effects)
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  unsigned int r;
  const char *format_ptr;

  /* Handle leaf items for which we set resource flags.  Also, special-case
     CALL, SET and CLOBBER operators.  */
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
      return;

    case SUBREG:
      if (!REG_P (SUBREG_REG (x)))
	mark_referenced_resources (SUBREG_REG (x), res, false);
      else
	{
	  unsigned int regno = subreg_regno (x);
	  unsigned int last_regno = regno + subreg_nregs (x);

	  gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	  for (r = regno; r < last_regno; r++)
	    SET_HARD_REG_BIT (res->regs, r);
	}
      return;

    case REG:
      gcc_assert (HARD_REGISTER_P (x));
      add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
      return;

    case MEM:
      /* If this memory shouldn't change, it really isn't referencing
	 memory.  */
      if (! MEM_READONLY_P (x))
	res->memory = 1;
      res->volatil |= MEM_VOLATILE_P (x);

      /* Mark registers used to access memory.  */
      mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CC0:
      res->cc = 1;
      return;

    case UNSPEC_VOLATILE:
    case TRAP_IF:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here, since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_referenced_resources (ASM_OPERANDS_INPUT (x, i), res, false);
      return;

    case CALL:
      /* The first operand will be a (MEM (xxx)) but doesn't really reference
	 memory.  The second operand may be referenced, though.  */
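      /* E.g. in (call (mem:QI (symbol_ref "foo")) (const_int 0)), the
	 MEM is only a calling-convention wrapper: we mark the address
	 inside it and the second operand, not the MEM itself.
	 (Illustrative RTL.)  */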
      mark_referenced_resources (XEXP (XEXP (x, 0), 0), res, false);
      mark_referenced_resources (XEXP (x, 1), res, false);
      return;

    case SET:
      /* Usually, the first operand of SET is set, not referenced.  But
	 registers used to access memory are referenced.  SET_DEST is
	 also referenced if it is a ZERO_EXTRACT.  */

      mark_referenced_resources (SET_SRC (x), res, false);

      x = SET_DEST (x);
      if (GET_CODE (x) == ZERO_EXTRACT
	  || GET_CODE (x) == STRICT_LOW_PART)
	mark_referenced_resources (x, res, false);
      else if (GET_CODE (x) == SUBREG)
	x = SUBREG_REG (x);
      if (MEM_P (x))
	mark_referenced_resources (XEXP (x, 0), res, false);
      return;

    case CLOBBER:
      return;

    case CALL_INSN:
      if (include_delayed_effects)
	{
	  /* A CALL references memory, the frame pointer if it exists, the
	     stack pointer, any global registers and any registers given in
	     USE insns immediately in front of the CALL.

	     However, we may have moved some of the parameter loading insns
	     into the delay slot of this CALL.  If so, the USE's for them
	     don't count and should be skipped.  */
	  rtx_insn *insn = PREV_INSN (as_a <rtx_insn *> (x));
	  rtx_sequence *sequence = 0;
	  int seq_size = 0;
	  int i;

	  /* If we are part of a delay slot sequence, point at the SEQUENCE.  */
	  if (NEXT_INSN (insn) != x)
	    {
	      sequence = as_a <rtx_sequence *> (PATTERN (NEXT_INSN (insn)));
	      seq_size = sequence->len ();
	      gcc_assert (GET_CODE (sequence) == SEQUENCE);
	    }
	    }

	  res->memory = 1;
	  SET_HARD_REG_BIT (res->regs, STACK_POINTER_REGNUM);
	  if (frame_pointer_needed)
	    {
	      SET_HARD_REG_BIT (res->regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
	      SET_HARD_REG_BIT (res->regs, HARD_FRAME_POINTER_REGNUM);
#endif
	    }

	  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	    if (global_regs[i])
	      SET_HARD_REG_BIT (res->regs, i);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can need any register.

	     This is done to be more conservative about how we handle setjmp.
	     We assume that they both use and set all registers.  Using all
	     registers ensures that a register will not be considered dead
	     just because it crosses a setjmp call.  A register should be
	     considered dead only if the setjmp call returns nonzero.  */
	  if (find_reg_note (x, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);

	  {
	    rtx link;

	    for (link = CALL_INSN_FUNCTION_USAGE (x);
		 link;
		 link = XEXP (link, 1))
	      if (GET_CODE (XEXP (link, 0)) == USE)
		{
		  for (i = 1; i < seq_size; i++)
		    {
		      rtx slot_pat = PATTERN (sequence->element (i));
		      if (GET_CODE (slot_pat) == SET
			  && rtx_equal_p (SET_DEST (slot_pat),
					  XEXP (XEXP (link, 0), 0)))
			break;
		    }
		  if (i >= seq_size)
		    mark_referenced_resources (XEXP (XEXP (link, 0), 0),
					       res, false);
		}
	  }
	}

      /* ... fall through to other INSN processing ...  */

    case INSN:
    case JUMP_INSN:

      if (GET_CODE (PATTERN (x)) == COND_EXEC)
	/* In addition to the usual references, also consider all outputs
	   as referenced, to compensate for mark_set_resources treating
	   them as killed.  This is similar to the ZERO_EXTRACT /
	   STRICT_LOW_PART handling, except that we have partial incidence
	   instead of partial width.  */
	mark_set_resources (x, res, 0,
			    include_delayed_effects
			    ? MARK_SRC_DEST_CALL : MARK_SRC_DEST);

#ifdef INSN_REFERENCES_ARE_DELAYED
      if (! include_delayed_effects
	  && INSN_REFERENCES_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;
#endif

      /* No special processing, just speed up.  */
      mark_referenced_resources (PATTERN (x), res, include_delayed_effects);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_referenced_resources (XEXP (x, i), res, include_delayed_effects);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_referenced_resources (XVECEXP (x, i, j), res,
				     include_delayed_effects);
	break;
      }
}

/* A subroutine of mark_target_live_regs.  Search forward from TARGET
   looking for registers that are set before they are used.  These are dead.
   Stop after passing a few conditional jumps, and/or a small
   number of unconditional branches.  */
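
/* E.g. if every path from TARGET sets r3 before any insn reads it, r3
   is removed from RES: whatever value it holds at TARGET is overwritten
   before it can be used, so it is not live there.  (Illustrative
   sketch only.)  */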

static rtx_insn *
find_dead_or_set_registers (rtx_insn *target, struct resources *res,
			    rtx *jump_target, int jump_count,
			    struct resources set, struct resources needed)
{
  HARD_REG_SET scratch;
  rtx_insn *insn;
  rtx_insn *next_insn;
  rtx_insn *jump_insn = 0;
  int i;

  for (insn = target; insn; insn = next_insn)
    {
      rtx_insn *this_jump_insn = insn;

      next_insn = NEXT_INSN (insn);

      /* If this instruction can throw an exception, then we don't
	 know where we might end up next.  That means that we have to
	 assume that whatever we have already marked as live really is
	 live.  */
      if (can_throw_internal (insn))
	break;

      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	  /* After a label, any pending dead registers that weren't yet
	     used can be made dead.  */
	  AND_COMPL_HARD_REG_SET (pending_dead_regs, needed.regs);
	  AND_COMPL_HARD_REG_SET (res->regs, pending_dead_regs);
	  CLEAR_HARD_REG_SET (pending_dead_regs);

	  continue;

	case BARRIER:
	case NOTE:
	  continue;

	case INSN:
	  if (GET_CODE (PATTERN (insn)) == USE)
	    {
	      /* If INSN is a USE made by update_block, we care about the
		 underlying insn.  Any registers set by the underlying insn
		 are live since the insn is being done somewhere else.  */
	      if (INSN_P (XEXP (PATTERN (insn), 0)))
		mark_set_resources (XEXP (PATTERN (insn), 0), res, 0,
				    MARK_SRC_DEST_CALL);

	      /* All other USE insns are to be ignored.  */
	      continue;
	    }
	  else if (GET_CODE (PATTERN (insn)) == CLOBBER)
	    continue;
	  else if (rtx_sequence *seq =
		     dyn_cast <rtx_sequence *> (PATTERN (insn)))
	    {
	      /* An unconditional jump can be used to fill the delay slot
		 of a call, so search for a JUMP_INSN in any position.  */
	      for (i = 0; i < seq->len (); i++)
		{
		  this_jump_insn = seq->insn (i);
		  if (JUMP_P (this_jump_insn))
		    break;
		}
	    }

	default:
	  break;
	}

      if (JUMP_P (this_jump_insn))
	{
	  if (jump_count++ < 10)
	    {
	      if (any_uncondjump_p (this_jump_insn)
		  || ANY_RETURN_P (PATTERN (this_jump_insn)))
		{
		  rtx lab_or_return = JUMP_LABEL (this_jump_insn);
		  if (ANY_RETURN_P (lab_or_return))
		    next_insn = NULL;
		  else
		    next_insn = as_a <rtx_insn *> (lab_or_return);
		  if (jump_insn == 0)
		    {
		      jump_insn = insn;
		      if (jump_target)
			*jump_target = JUMP_LABEL (this_jump_insn);
		    }
		}
	      else if (any_condjump_p (this_jump_insn))
		{
		  struct resources target_set, target_res;
		  struct resources fallthrough_res;

		  /* We can handle conditional branches here by following
		     both paths, and then IOR the results of the two paths
		     together, which will give us registers that are dead
		     on both paths.  Since this is expensive, we give it
		     a much higher cost than unconditional branches.  The
		     cost was chosen so that we will follow at most 1
		     conditional branch.  */
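
		  /* E.g. if r3 is set before use on both paths but r4 only
		     on the fallthrough path, the IOR/AND at the end of this
		     block removes only r3 from *RES: a register is dead
		     here only if it is dead on both paths.  (Illustrative
		     sketch only.)  */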

		  jump_count += 4;
		  if (jump_count >= 10)
		    break;

		  mark_referenced_resources (insn, &needed, true);

		  /* For an annulled branch, mark_set_resources ignores slots
		     filled by instructions from the target.  This is correct
		     if the branch is not taken.  Since we are following both
		     paths from the branch, we must also compute correct info
		     if the branch is taken.  We do this by inverting all of
		     the INSN_FROM_TARGET_P bits, calling mark_set_resources,
		     and then inverting the INSN_FROM_TARGET_P bits again.  */

		  if (GET_CODE (PATTERN (insn)) == SEQUENCE
		      && INSN_ANNULLED_BRANCH_P (this_jump_insn))
		    {
		      rtx_sequence *seq = as_a <rtx_sequence *> (PATTERN (insn));
		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      target_set = set;
		      mark_set_resources (insn, &target_set, 0,
					  MARK_SRC_DEST_CALL);

		      for (i = 1; i < seq->len (); i++)
			INSN_FROM_TARGET_P (seq->element (i))
			  = ! INSN_FROM_TARGET_P (seq->element (i));

		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		    }
		  else
		    {
		      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
		      target_set = set;
		    }

		  target_res = *res;
		  COPY_HARD_REG_SET (scratch, target_set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (target_res.regs, scratch);

		  fallthrough_res = *res;
		  COPY_HARD_REG_SET (scratch, set.regs);
		  AND_COMPL_HARD_REG_SET (scratch, needed.regs);
		  AND_COMPL_HARD_REG_SET (fallthrough_res.regs, scratch);

		  if (!ANY_RETURN_P (JUMP_LABEL (this_jump_insn)))
		    find_dead_or_set_registers (JUMP_LABEL_AS_INSN (this_jump_insn),
						&target_res, 0, jump_count,
						target_set, needed);
		  find_dead_or_set_registers (next_insn,
					      &fallthrough_res, 0, jump_count,
					      set, needed);
		  IOR_HARD_REG_SET (fallthrough_res.regs, target_res.regs);
		  AND_HARD_REG_SET (res->regs, fallthrough_res.regs);
		  break;
		}
	      else
		break;
	    }
	  else
	    {
	      /* Don't try this optimization if we expired our jump count
		 above, since that would mean there may be an infinite loop
		 in the function being compiled.  */
	      jump_insn = 0;
	      break;
	    }
	}

      mark_referenced_resources (insn, &needed, true);
      mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

      COPY_HARD_REG_SET (scratch, set.regs);
      AND_COMPL_HARD_REG_SET (scratch, needed.regs);
      AND_COMPL_HARD_REG_SET (res->regs, scratch);
    }

  return jump_insn;
}

/* Given X, a part of an insn, and a pointer to a `struct resources',
   RES, indicate which resources are modified by the insn.  If
   MARK_TYPE is MARK_SRC_DEST_CALL, also mark resources potentially
   set by the called routine.

   If IN_DEST is nonzero, it means we are inside a SET.  Otherwise,
   objects are being referenced instead of set.

   We never mark the insn as modifying the condition code unless it explicitly
   SETs CC0 even though this is not totally correct.  The reason for this is
   that we require a SET of CC0 to immediately precede the reference to CC0.
   So if some other insn sets CC0 as a side-effect, we know it cannot affect
   our computation and thus may be placed in a delay slot.  */
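
/* A typical call looks like this (illustrative sketch only):

       struct resources set;
       CLEAR_RESOURCE (&set);
       mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);

   after which SET describes everything INSN, and for a CALL_INSN the
   called routine too, might modify.  */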

void
mark_set_resources (rtx x, struct resources *res, int in_dest,
		    enum mark_resource_type mark_type)
{
  enum rtx_code code;
  int i, j;
  unsigned int r;
  const char *format_ptr;

 restart:

  code = GET_CODE (x);

  switch (code)
    {
    case NOTE:
    case BARRIER:
    case CODE_LABEL:
    case USE:
    CASE_CONST_ANY:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    case PC:
      /* These don't set any resources.  */
      return;

    case CC0:
      if (in_dest)
	res->cc = 1;
      return;

    case CALL_INSN:
      /* Called routine modifies the condition code, memory, any registers
	 that aren't saved across calls, global registers and anything
	 explicitly CLOBBERed immediately after the CALL_INSN.  */

      if (mark_type == MARK_SRC_DEST_CALL)
	{
	  rtx_call_insn *call_insn = as_a <rtx_call_insn *> (x);
	  rtx link;
	  HARD_REG_SET regs;

	  res->cc = res->memory = 1;

	  get_call_reg_set_usage (call_insn, &regs, regs_invalidated_by_call);
	  IOR_HARD_REG_SET (res->regs, regs);

	  for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	       link; link = XEXP (link, 1))
	    if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	      mark_set_resources (SET_DEST (XEXP (link, 0)), res, 1,
				  MARK_SRC_DEST);

	  /* Check for a REG_SETJMP.  If it exists, then we must
	     assume that this call can clobber any register.  */
	  if (find_reg_note (call_insn, REG_SETJMP, NULL))
	    SET_HARD_REG_SET (res->regs);
	}

      /* ... and also what its RTL says it modifies, if anything.  */

    case JUMP_INSN:
    case INSN:

      /* An insn consisting of just a CLOBBER (or USE) is just for flow
	 and doesn't actually do anything, so we ignore it.  */

#ifdef INSN_SETS_ARE_DELAYED
      if (mark_type != MARK_SRC_DEST_CALL
	  && INSN_SETS_ARE_DELAYED (as_a <rtx_insn *> (x)))
	return;
#endif

      x = PATTERN (x);
      if (GET_CODE (x) != USE && GET_CODE (x) != CLOBBER)
	goto restart;
      return;

    case SET:
      /* If the source of a SET is a CALL, this is actually done by
	 the called routine.  So only include it if we are to include the
	 effects of the calling routine.  */

      mark_set_resources (SET_DEST (x), res,
			  (mark_type == MARK_SRC_DEST_CALL
			   || GET_CODE (SET_SRC (x)) != CALL),
			  mark_type);

      mark_set_resources (SET_SRC (x), res, 0, MARK_SRC_DEST);
      return;

    case CLOBBER:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case SEQUENCE:
      {
        rtx_sequence *seq = as_a <rtx_sequence *> (x);
        rtx control = seq->element (0);
        bool annul_p = JUMP_P (control) && INSN_ANNULLED_BRANCH_P (control);

        mark_set_resources (control, res, 0, mark_type);
        for (i = seq->len () - 1; i >= 0; --i)
	  {
	    rtx elt = seq->element (i);
	    /* For an annulled branch, a slot filled from the target only
	       executes when the branch is taken, so skip it here.  */
	    if (!annul_p || !INSN_FROM_TARGET_P (elt))
	      mark_set_resources (elt, res, 0, mark_type);
	  }
      }
      return;

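      /* An auto-modify address both uses and sets its base register: e.g.
	 (mem:SI (post_inc:SI (reg:SI 4))) modifies register 4, so the
	 cases below record the base as set.  (Illustrative RTL.)  */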
    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      return;

    case PRE_MODIFY:
    case POST_MODIFY:
      mark_set_resources (XEXP (x, 0), res, 1, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 0), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (XEXP (x, 1), 1), res, 0, MARK_SRC_DEST);
      return;

    case SIGN_EXTRACT:
    case ZERO_EXTRACT:
      mark_set_resources (XEXP (x, 0), res, in_dest, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 1), res, 0, MARK_SRC_DEST);
      mark_set_resources (XEXP (x, 2), res, 0, MARK_SRC_DEST);
      return;

    case MEM:
      if (in_dest)
	{
	  res->memory = 1;
	  res->volatil |= MEM_VOLATILE_P (x);
	}

      mark_set_resources (XEXP (x, 0), res, 0, MARK_SRC_DEST);
      return;

    case SUBREG:
      if (in_dest)
	{
	  if (!REG_P (SUBREG_REG (x)))
	    mark_set_resources (SUBREG_REG (x), res, in_dest, mark_type);
	  else
	    {
	      unsigned int regno = subreg_regno (x);
	      unsigned int last_regno = regno + subreg_nregs (x);

	      gcc_assert (last_regno <= FIRST_PSEUDO_REGISTER);
	      for (r = regno; r < last_regno; r++)
		SET_HARD_REG_BIT (res->regs, r);
	    }
	}
      return;

    case REG:
      if (in_dest)
	{
	  gcc_assert (HARD_REGISTER_P (x));
	  add_to_hard_reg_set (&res->regs, GET_MODE (x), REGNO (x));
	}
      return;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      /* Traditional asm's are always volatile.  */
      res->volatil = 1;
      return;

    case TRAP_IF:
      res->volatil = 1;
      break;

    case ASM_OPERANDS:
      res->volatil |= MEM_VOLATILE_P (x);

      /* For all ASM_OPERANDS, we must traverse the vector of input operands.
	 We cannot just fall through here, since then we would be confused
	 by the ASM_INPUT rtx inside ASM_OPERANDS, which does not indicate
	 a traditional asm, unlike its normal usage.  */

      for (i = 0; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
	mark_set_resources (ASM_OPERANDS_INPUT (x, i), res, in_dest,
			    MARK_SRC_DEST);
      return;

    default:
      break;
    }

  /* Process each sub-expression and flag what it needs.  */
  format_ptr = GET_RTX_FORMAT (code);
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	mark_set_resources (XEXP (x, i), res, in_dest, mark_type);
	break;

      case 'E':
	for (j = 0; j < XVECLEN (x, i); j++)
	  mark_set_resources (XVECEXP (x, i, j), res, in_dest, mark_type);
	break;
      }
}

/* Return TRUE if INSN is a return, possibly with a filled delay slot.  */

static bool
return_insn_p (const_rtx insn)
{
  if (JUMP_P (insn) && ANY_RETURN_P (PATTERN (insn)))
    return true;

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    return return_insn_p (XVECEXP (PATTERN (insn), 0, 0));

  return false;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.

   Otherwise, we try to find out what is live by consulting the basic block
   information.  This is tricky, because we must consider the actions of
   reload and jump optimization, which occur after the basic block information
   has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status (by starting at a label following a
   BARRIER, we are immune to actions taken by reload and jump.)  Then we
   scan all insns between that point and our target.  For each CLOBBER (or
   for call-clobbered regs when we pass a CALL_INSN), mark the appropriate
   registers as dead.  For a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live.

   Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  This is only done if the function
   init_resource_info () was invoked before we are called.  */
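
/* A typical use by the delay slot filler looks like this (illustrative
   sketch only):

       struct resources live;
       mark_target_live_regs (get_insns (), target, &live);

   after which live.regs holds the hard registers assumed live at
   TARGET.  */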

void
mark_target_live_regs (rtx_insn *insns, rtx target_maybe_return, struct resources *res)
{
  int b = -1;
  unsigned int i;
  struct target_info *tinfo = NULL;
  rtx_insn *insn;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;

  /* Handle end of function.  */
  if (target_maybe_return == 0 || ANY_RETURN_P (target_maybe_return))
    {
      *res = end_of_function_needs;
      return;
    }

  /* We've handled the case of RETURN/SIMPLE_RETURN; we should now have an
     instruction.  */
  rtx_insn *target = as_a <rtx_insn *> (target_maybe_return);

  /* Handle return insn.  */
  if (return_insn_p (target))
    {
      *res = end_of_function_needs;
      mark_referenced_resources (target, res, false);
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (target))
	  break;

      /* Start by getting the basic block number.  If we have saved
	 information, we can get it from there unless the insn at the
	 start of the basic block has been deleted.  */
      if (tinfo && tinfo->block != -1
	  && ! BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, tinfo->block))->deleted ())
	b = tinfo->block;
    }

  if (b == -1)
    b = find_basic_block (target, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (target_hash_table != NULL)
    {
      if (tinfo)
	{
	  /* If the information is up-to-date, use it.  Otherwise, we will
	     update it below.  */
	  if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	    {
	      COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	      return;
	    }
	}
      else
	{
	  /* Allocate a place to put our results and chain it into the
	     hash table.  */
	  tinfo = XNEW (struct target_info);
	  tinfo->uid = INSN_UID (target);
	  tinfo->block = b;
	  tinfo->next
	    = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
	  target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
	}
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET; this custom life analysis is really about registers so we need
     to use the LR problem.  Otherwise, we must assume everything is live.  */
  if (b != -1)
    {
      regset regs_live = DF_LR_IN (BASIC_BLOCK_FOR_FN (cfun, b));
      rtx_insn *start_insn, *stop_insn;

      /* Compute hard regs live at start of block.  */
      REG_SET_TO_HARD_REG_SET (current_live_regs, regs_live);

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->index ?
		    insns : BB_HEAD (BASIC_BLOCK_FOR_FN (cfun, b)));
      stop_insn = target;

      if (NONJUMP_INSN_P (start_insn)
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = as_a <rtx_sequence *> (PATTERN (start_insn))->insn (0);

      if (NONJUMP_INSN_P (stop_insn)
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx_insn *real_insn = insn;
	  enum rtx_code code = GET_CODE (insn);

	  if (DEBUG_INSN_P (insn))
	    continue;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if ((code == INSN || code == JUMP_INSN || code == CALL_INSN)
	      && INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (code == INSN
	      && GET_CODE (PATTERN (insn)) == USE
	      && INSN_P (XEXP (PATTERN (insn), 0)))
	    real_insn = as_a <rtx_insn *> (XEXP (PATTERN (insn), 0));

	  if (CALL_P (real_insn))
	    {
	      /* Values in call-clobbered registers survive a COND_EXEC CALL
		 if that is not executed; this matters for resource use
		 because they may be used by a complementarily (or more
		 strictly) predicated instruction, or if the CALL is
		 NORETURN.  */
	      if (GET_CODE (PATTERN (real_insn)) != COND_EXEC)
		{
		  HARD_REG_SET regs_invalidated_by_this_call;
		  get_call_reg_set_usage (real_insn,
					  &regs_invalidated_by_this_call,
					  regs_invalidated_by_call);
		  /* CALL clobbers all call-used regs that aren't fixed except
		     sp, ap, and fp.  Do this before setting the result of the
		     call live.  */
		  AND_COMPL_HARD_REG_SET (current_live_regs,
					  regs_invalidated_by_this_call);
		}

	      /* A CALL_INSN sets any global register live, since it may
		 have been modified by the call.  */
	      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		if (global_regs[i])
		  SET_HARD_REG_BIT (current_live_regs, i);
	    }

	  /* Mark anything killed in an insn to be deadened at the next
	     label.  Ignore USE insns; the only REG_DEAD notes will be for
	     parameters.  But they might be early.  A CALL_INSN will usually
	     clobber registers used for parameters.  It isn't worth bothering
	     with the unlikely case when it won't.  */
	  if ((NONJUMP_INSN_P (real_insn)
	       && GET_CODE (PATTERN (real_insn)) != USE
	       && GET_CODE (PATTERN (real_insn)) != CLOBBER)
	      || JUMP_P (real_insn)
	      || CALL_P (real_insn))
	    {
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  add_to_hard_reg_set (&pending_dead_regs,
				      GET_MODE (XEXP (link, 0)),
				      REGNO (XEXP (link, 0)));

	      note_stores (PATTERN (real_insn), update_live_status, NULL);

	      /* If any registers were unused after this insn, kill them.
		 These notes will always be accurate.  */
	      for (link = REG_NOTES (real_insn); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0))
		    && REGNO (XEXP (link, 0)) < FIRST_PSEUDO_REGISTER)
		  remove_from_hard_reg_set (&current_live_regs,
					   GET_MODE (XEXP (link, 0)),
					   REGNO (XEXP (link, 0)));
	    }

	  else if (LABEL_P (real_insn))
	    {
	      basic_block bb;

	      /* A label clobbers the pending dead registers since neither
		 reload nor jump will propagate a value across a label.  */
	      AND_COMPL_HARD_REG_SET (current_live_regs, pending_dead_regs);
	      CLEAR_HARD_REG_SET (pending_dead_regs);

	      /* We must conservatively assume that all registers that used
		 to be live here still are.  The fallthrough edge may have
		 left a live register uninitialized.  */
	      bb = BLOCK_FOR_INSN (real_insn);
	      if (bb)
		{
		  HARD_REG_SET extra_live;

		  REG_SET_TO_HARD_REG_SET (extra_live, DF_LR_IN (bb));
		  IOR_HARD_REG_SET (current_live_regs, extra_live);
		}
	    }

	  /* The beginning of the epilogue corresponds to the end of the
	     RTL chain when there are no epilogue insns.  Certain resources
	     are implicitly required at that point.  */
	  else if (NOTE_P (real_insn)
		   && NOTE_KIND (real_insn) == NOTE_INSN_EPILOGUE_BEG)
	    IOR_HARD_REG_SET (current_live_regs, start_of_epilogue_needs.regs);
	}

      COPY_HARD_REG_SET (res->regs, current_live_regs);
      if (tinfo != NULL)
	{
	  tinfo->block = b;
	  tinfo->bb_tick = bb_ticks[b];
	}
    }
  else
    /* We didn't find the start of a basic block.  Assume everything
       in use.  This should happen only extremely rarely.  */
    SET_HARD_REG_SET (res->regs);

  CLEAR_RESOURCE (&set);
  CLEAR_RESOURCE (&needed);

  jump_insn = find_dead_or_set_registers (target, res, &jump_target, 0,
					  set, needed);

  /* If we hit an unconditional branch, we have another way of finding out
     what is live: we can see what is live at the branch target and include
     anything used but not set before the branch.  We add the live
     resources found using the test below to those found until now.  */

  if (jump_insn)
    {
      struct resources new_resources;
      rtx_insn *stop_insn = next_active_insn (jump_insn);

      if (!ANY_RETURN_P (jump_target))
	jump_target = next_active_insn (jump_target);
      mark_target_live_regs (insns, jump_target, &new_resources);
      CLEAR_RESOURCE (&set);
      CLEAR_RESOURCE (&needed);

      /* Include JUMP_INSN in the needed registers.  */
      for (insn = target; insn != stop_insn; insn = next_active_insn (insn))
	{
	  mark_referenced_resources (insn, &needed, true);

	  COPY_HARD_REG_SET (scratch, needed.regs);
	  AND_COMPL_HARD_REG_SET (scratch, set.regs);
	  IOR_HARD_REG_SET (new_resources.regs, scratch);

	  mark_set_resources (insn, &set, 0, MARK_SRC_DEST_CALL);
	}

      IOR_HARD_REG_SET (res->regs, new_resources.regs);
    }

  if (tinfo != NULL)
    {
      COPY_HARD_REG_SET (tinfo->live_regs, res->regs);
    }
}

/* Initialize the resources required by mark_target_live_regs ().
   This should be invoked before the first call to mark_target_live_regs.  */
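
/* The expected call sequence is roughly (illustrative sketch only):

       init_resource_info (epilogue_insn);
       ... calls to mark_target_live_regs () ...
       free_resource_info ();

   so the hash table and bb_ticks array are live in between.  */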

void
init_resource_info (rtx_insn *epilogue_insn)
{
  int i;
  basic_block bb;

  /* Indicate what resources are required to be valid at the end of the current
     function.  The condition code never is and memory always is.
     The stack pointer is needed unless EXIT_IGNORE_STACK is true
     and there is an epilogue that restores the original stack pointer
     from the frame pointer.  Registers used to return the function value
     are needed.  Registers holding global variables are needed.  */

  end_of_function_needs.cc = 0;
  end_of_function_needs.memory = 1;
  CLEAR_HARD_REG_SET (end_of_function_needs.regs);

  if (frame_pointer_needed)
    {
      SET_HARD_REG_BIT (end_of_function_needs.regs, FRAME_POINTER_REGNUM);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      SET_HARD_REG_BIT (end_of_function_needs.regs, HARD_FRAME_POINTER_REGNUM);
#endif
    }
  if (!(frame_pointer_needed
	&& EXIT_IGNORE_STACK
	&& epilogue_insn
	&& !crtl->sp_is_unchanging))
    SET_HARD_REG_BIT (end_of_function_needs.regs, STACK_POINTER_REGNUM);

  if (crtl->return_rtx != 0)
    mark_referenced_resources (crtl->return_rtx,
			       &end_of_function_needs, true);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (global_regs[i]
#ifdef EPILOGUE_USES
	|| EPILOGUE_USES (i)
#endif
	)
      SET_HARD_REG_BIT (end_of_function_needs.regs, i);

  /* The registers required to be live at the end of the function are
     represented in the flow information as being dead just prior to
     reaching the end of the function.  For example, the return of a value
     might be represented by a USE of the return register immediately
     followed by an unconditional jump to the return label where the
     return label is the end of the RTL chain.  The end of the RTL chain
     is then taken to mean that the return register is live.

     This sequence is no longer maintained when epilogue instructions are
     added to the RTL chain.  To reconstruct the original meaning, the
     start of the epilogue (NOTE_INSN_EPILOGUE_BEG) is regarded as the
     point where these registers become live (start_of_epilogue_needs).
     If epilogue instructions are present, the registers set by those
     instructions won't have been processed by flow.  Thus, those
     registers are additionally required at the end of the RTL chain
     (end_of_function_needs).  */

  start_of_epilogue_needs = end_of_function_needs;

  while ((epilogue_insn = next_nonnote_insn (epilogue_insn)))
    {
      mark_set_resources (epilogue_insn, &end_of_function_needs, 0,
			  MARK_SRC_DEST_CALL);
      if (return_insn_p (epilogue_insn))
	break;
    }

  /* Allocate and initialize the tables used by mark_target_live_regs.  */
  target_hash_table = XCNEWVEC (struct target_info *, TARGET_HASH_PRIME);
  bb_ticks = XCNEWVEC (int, last_basic_block_for_fn (cfun));

  /* Set the BLOCK_FOR_INSN of each label that starts a basic block.  */
  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = bb;
}

/* Free up the resources allocated to mark_target_live_regs ().  This
   should be invoked after the last call to mark_target_live_regs ().  */

void
free_resource_info (void)
{
  basic_block bb;

  if (target_hash_table != NULL)
    {
      int i;

      for (i = 0; i < TARGET_HASH_PRIME; ++i)
	{
	  struct target_info *ti = target_hash_table[i];

	  while (ti)
	    {
	      struct target_info *next = ti->next;
	      free (ti);
	      ti = next;
	    }
	}

      free (target_hash_table);
      target_hash_table = NULL;
    }

  if (bb_ticks != NULL)
    {
      free (bb_ticks);
      bb_ticks = NULL;
    }

  FOR_EACH_BB_FN (bb, cfun)
    if (LABEL_P (BB_HEAD (bb)))
      BLOCK_FOR_INSN (BB_HEAD (bb)) = NULL;
}

/* Clear any hashed information that we have stored for INSN.  */

void
clear_hashed_info_for_insn (rtx_insn *insn)
{
  struct target_info *tinfo;

  if (target_hash_table != NULL)
    {
      for (tinfo = target_hash_table[INSN_UID (insn) % TARGET_HASH_PRIME];
	   tinfo; tinfo = tinfo->next)
	if (tinfo->uid == INSN_UID (insn))
	  break;

      if (tinfo)
	tinfo->block = -1;
    }
}

/* Increment the tick count for the basic block that contains INSN.  */

void
incr_ticks_for_insn (rtx_insn *insn)
{
  int b = find_basic_block (insn, MAX_DELAY_SLOT_LIVE_SEARCH);

  if (b != -1)
    bb_ticks[b]++;
}

/* Add TRIAL to the set of resources used at the end of the current
   function.  */
void
mark_end_of_function_resources (rtx trial, bool include_delayed_effects)
{
  mark_referenced_resources (trial, &end_of_function_needs,
			     include_delayed_effects);
}