// RegisterScavenging.cpp revision 360784
1//===- RegisterScavenging.cpp - Machine register scavenging ---------------===//
2//
3// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4// See https://llvm.org/LICENSE.txt for license information.
5// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6//
7//===----------------------------------------------------------------------===//
8//
9/// \file
10/// This file implements the machine register scavenger. It can provide
11/// information, such as unused registers, at any point in a machine basic
12/// block. It also provides a mechanism to make registers available by evicting
13/// them to spill slots.
14//
15//===----------------------------------------------------------------------===//
16
17#include "llvm/CodeGen/RegisterScavenging.h"
18#include "llvm/ADT/ArrayRef.h"
19#include "llvm/ADT/BitVector.h"
20#include "llvm/ADT/SmallVector.h"
21#include "llvm/ADT/Statistic.h"
22#include "llvm/CodeGen/LiveRegUnits.h"
23#include "llvm/CodeGen/MachineBasicBlock.h"
24#include "llvm/CodeGen/MachineFrameInfo.h"
25#include "llvm/CodeGen/MachineFunction.h"
26#include "llvm/CodeGen/MachineFunctionPass.h"
27#include "llvm/CodeGen/MachineInstr.h"
28#include "llvm/CodeGen/MachineOperand.h"
29#include "llvm/CodeGen/MachineRegisterInfo.h"
30#include "llvm/CodeGen/TargetFrameLowering.h"
31#include "llvm/CodeGen/TargetInstrInfo.h"
32#include "llvm/CodeGen/TargetRegisterInfo.h"
33#include "llvm/CodeGen/TargetSubtargetInfo.h"
34#include "llvm/InitializePasses.h"
35#include "llvm/MC/MCRegisterInfo.h"
36#include "llvm/Pass.h"
37#include "llvm/Support/Debug.h"
38#include "llvm/Support/ErrorHandling.h"
39#include "llvm/Support/raw_ostream.h"
40#include <algorithm>
41#include <cassert>
42#include <iterator>
43#include <limits>
44#include <string>
45#include <utility>
46
47using namespace llvm;
48
49#define DEBUG_TYPE "reg-scavenging"
50
51STATISTIC(NumScavengedRegs, "Number of frame index regs scavenged");
52
/// Mark the lane mask \p LaneMask of register \p Reg as live.
void RegScavenger::setRegUsed(Register Reg, LaneBitmask LaneMask) {
  LiveUnits.addRegMasked(Reg, LaneMask);
}
56
57void RegScavenger::init(MachineBasicBlock &MBB) {
58  MachineFunction &MF = *MBB.getParent();
59  TII = MF.getSubtarget().getInstrInfo();
60  TRI = MF.getSubtarget().getRegisterInfo();
61  MRI = &MF.getRegInfo();
62  LiveUnits.init(*TRI);
63
64  assert((NumRegUnits == 0 || NumRegUnits == TRI->getNumRegUnits()) &&
65         "Target changed?");
66
67  // Self-initialize.
68  if (!this->MBB) {
69    NumRegUnits = TRI->getNumRegUnits();
70    KillRegUnits.resize(NumRegUnits);
71    DefRegUnits.resize(NumRegUnits);
72    TmpRegUnits.resize(NumRegUnits);
73  }
74  this->MBB = &MBB;
75
76  for (ScavengedInfo &SI : Scavenged) {
77    SI.Reg = 0;
78    SI.Restore = nullptr;
79  }
80
81  Tracking = false;
82}
83
/// Start tracking liveness from the beginning of basic block \p MBB,
/// seeding the live set with the block's live-in registers.
void RegScavenger::enterBasicBlock(MachineBasicBlock &MBB) {
  init(MBB);
  LiveUnits.addLiveIns(MBB);
}
88
89void RegScavenger::enterBasicBlockEnd(MachineBasicBlock &MBB) {
90  init(MBB);
91  LiveUnits.addLiveOuts(MBB);
92
93  // Move internal iterator at the last instruction of the block.
94  if (MBB.begin() != MBB.end()) {
95    MBBI = std::prev(MBB.end());
96    Tracking = true;
97  }
98}
99
/// Set the bits of all register units of \p Reg in \p BV.
void RegScavenger::addRegUnits(BitVector &BV, Register Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.set(*RUI);
}
104
/// Clear the bits of all register units of \p Reg in \p BV.
void RegScavenger::removeRegUnits(BitVector &BV, Register Reg) {
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI)
    BV.reset(*RUI);
}
109
110void RegScavenger::determineKillsAndDefs() {
111  assert(Tracking && "Must be tracking to determine kills and defs");
112
113  MachineInstr &MI = *MBBI;
114  assert(!MI.isDebugInstr() && "Debug values have no kills or defs");
115
116  // Find out which registers are early clobbered, killed, defined, and marked
117  // def-dead in this instruction.
118  KillRegUnits.reset();
119  DefRegUnits.reset();
120  for (const MachineOperand &MO : MI.operands()) {
121    if (MO.isRegMask()) {
122      TmpRegUnits.clear();
123      for (unsigned RU = 0, RUEnd = TRI->getNumRegUnits(); RU != RUEnd; ++RU) {
124        for (MCRegUnitRootIterator RURI(RU, TRI); RURI.isValid(); ++RURI) {
125          if (MO.clobbersPhysReg(*RURI)) {
126            TmpRegUnits.set(RU);
127            break;
128          }
129        }
130      }
131
132      // Apply the mask.
133      KillRegUnits |= TmpRegUnits;
134    }
135    if (!MO.isReg())
136      continue;
137    Register Reg = MO.getReg();
138    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
139      continue;
140
141    if (MO.isUse()) {
142      // Ignore undef uses.
143      if (MO.isUndef())
144        continue;
145      if (MO.isKill())
146        addRegUnits(KillRegUnits, Reg);
147    } else {
148      assert(MO.isDef());
149      if (MO.isDead())
150        addRegUnits(KillRegUnits, Reg);
151      else
152        addRegUnits(DefRegUnits, Reg);
153    }
154  }
155}
156
157void RegScavenger::unprocess() {
158  assert(Tracking && "Cannot unprocess because we're not tracking");
159
160  MachineInstr &MI = *MBBI;
161  if (!MI.isDebugInstr()) {
162    determineKillsAndDefs();
163
164    // Commit the changes.
165    setUnused(DefRegUnits);
166    setUsed(KillRegUnits);
167  }
168
169  if (MBBI == MBB->begin()) {
170    MBBI = MachineBasicBlock::iterator(nullptr);
171    Tracking = false;
172  } else
173    --MBBI;
174}
175
/// Advance the scavenger by one instruction, updating liveness with the
/// kills and defs of the instruction we move onto. In assert builds the
/// instruction's register uses are also verified against the live set.
void RegScavenger::forward() {
  // Move ptr forward.
  if (!Tracking) {
    MBBI = MBB->begin();
    Tracking = true;
  } else {
    assert(MBBI != MBB->end() && "Already past the end of the basic block!");
    MBBI = std::next(MBBI);
  }
  assert(MBBI != MBB->end() && "Already at the end of the basic block!");

  MachineInstr &MI = *MBBI;

  // Expire scavenge-slot reservations whose restore point is this
  // instruction.
  for (SmallVectorImpl<ScavengedInfo>::iterator I = Scavenged.begin(),
         IE = Scavenged.end(); I != IE; ++I) {
    if (I->Restore != &MI)
      continue;

    I->Reg = 0;
    I->Restore = nullptr;
  }

  // Debug instructions don't affect liveness.
  if (MI.isDebugInstr())
    return;

  determineKillsAndDefs();

  // Verify uses and defs.
#ifndef NDEBUG
  for (const MachineOperand &MO : MI.operands()) {
    if (!MO.isReg())
      continue;
    Register Reg = MO.getReg();
    if (!Register::isPhysicalRegister(Reg) || isReserved(Reg))
      continue;
    if (MO.isUse()) {
      if (MO.isUndef())
        continue;
      if (!isRegUsed(Reg)) {
        // Check if it's partial live: e.g.
        // D0 = insert_subreg undef D0, S0
        // ... D0
        // The problem is the insert_subreg could be eliminated. The use of
        // D0 is using a partially undef value. This is not *incorrect* since
        // S1 is can be freely clobbered.
        // Ideally we would like a way to model this, but leaving the
        // insert_subreg around causes both correctness and performance issues.
        bool SubUsed = false;
        for (const MCPhysReg &SubReg : TRI->subregs(Reg))
          if (isRegUsed(SubReg)) {
            SubUsed = true;
            break;
          }
        bool SuperUsed = false;
        for (MCSuperRegIterator SR(Reg, TRI); SR.isValid(); ++SR) {
          if (isRegUsed(*SR)) {
            SuperUsed = true;
            break;
          }
        }
        if (!SubUsed && !SuperUsed) {
          // Neither the register nor any overlapping register is live:
          // dump the function and abort.
          MBB->getParent()->verify(nullptr, "In Register Scavenger");
          llvm_unreachable("Using an undefined register!");
        }
        (void)SubUsed;
        (void)SuperUsed;
      }
    } else {
      assert(MO.isDef());
#if 0
      // FIXME: Enable this once we've figured out how to correctly transfer
      // implicit kills during codegen passes like the coalescer.
      assert((KillRegs.test(Reg) || isUnused(Reg) ||
              isLiveInButUnusedBefore(Reg, MI, MBB, TRI, MRI)) &&
             "Re-defining a live register!");
#endif
    }
  }
#endif // NDEBUG

  // Commit the changes.
  setUnused(KillRegUnits);
  setUsed(DefRegUnits);
}
260
261void RegScavenger::backward() {
262  assert(Tracking && "Must be tracking to determine kills and defs");
263
264  const MachineInstr &MI = *MBBI;
265  LiveUnits.stepBackward(MI);
266
267  // Expire scavenge spill frameindex uses.
268  for (ScavengedInfo &I : Scavenged) {
269    if (I.Restore == &MI) {
270      I.Reg = 0;
271      I.Restore = nullptr;
272    }
273  }
274
275  if (MBBI == MBB->begin()) {
276    MBBI = MachineBasicBlock::iterator(nullptr);
277    Tracking = false;
278  } else
279    --MBBI;
280}
281
/// Return true if \p Reg is currently live. Reserved registers count as used
/// only when \p includeReserved is set.
bool RegScavenger::isRegUsed(Register Reg, bool includeReserved) const {
  if (isReserved(Reg))
    return includeReserved;
  return !LiveUnits.available(Reg);
}
287
288Register RegScavenger::FindUnusedReg(const TargetRegisterClass *RC) const {
289  for (Register Reg : *RC) {
290    if (!isRegUsed(Reg)) {
291      LLVM_DEBUG(dbgs() << "Scavenger found unused reg: " << printReg(Reg, TRI)
292                        << "\n");
293      return Reg;
294    }
295  }
296  return 0;
297}
298
299BitVector RegScavenger::getRegsAvailable(const TargetRegisterClass *RC) {
300  BitVector Mask(TRI->getNumRegs());
301  for (Register Reg : *RC)
302    if (!isRegUsed(Reg))
303      Mask.set(Reg);
304  return Mask;
305}
306
/// Scan forward from just after \p StartMI, for up to \p InstrLimit
/// non-debug instructions (stopping at the first terminator), and return a
/// register from \p Candidates that stays untouched for the longest time.
/// \p UseMI receives the position where a scavenged copy of the survivor
/// would have to be restored.
Register RegScavenger::findSurvivorReg(MachineBasicBlock::iterator StartMI,
                                       BitVector &Candidates,
                                       unsigned InstrLimit,
                                       MachineBasicBlock::iterator &UseMI) {
  // Candidate 0 would be NoRegister; find_first returns -1 on an empty set.
  int Survivor = Candidates.find_first();
  assert(Survivor > 0 && "No candidates for scavenging");

  MachineBasicBlock::iterator ME = MBB->getFirstTerminator();
  assert(StartMI != ME && "MI already at terminator");
  MachineBasicBlock::iterator RestorePointMI = StartMI;
  MachineBasicBlock::iterator MI = StartMI;

  bool inVirtLiveRange = false;
  for (++MI; InstrLimit > 0 && MI != ME; ++MI, --InstrLimit) {
    if (MI->isDebugInstr()) {
      ++InstrLimit; // Don't count debug instructions
      continue;
    }
    bool isVirtKillInsn = false;
    bool isVirtDefInsn = false;
    // Remove any candidates touched by instruction.
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        Candidates.clearBitsNotInMask(MO.getRegMask());
      if (!MO.isReg() || MO.isUndef() || !MO.getReg())
        continue;
      if (Register::isVirtualRegister(MO.getReg())) {
        if (MO.isDef())
          isVirtDefInsn = true;
        else if (MO.isKill())
          isVirtKillInsn = true;
        continue;
      }
      // Physical operand: drop the register and everything aliasing it.
      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
        Candidates.reset(*AI);
    }
    // If we're not in a virtual reg's live range, this is a valid
    // restore point.
    if (!inVirtLiveRange) RestorePointMI = MI;

    // Update whether we're in the live range of a virtual register
    if (isVirtKillInsn) inVirtLiveRange = false;
    if (isVirtDefInsn) inVirtLiveRange = true;

    // Was our survivor untouched by this instruction?
    if (Candidates.test(Survivor))
      continue;

    // All candidates gone?
    if (Candidates.none())
      break;

    // Current survivor was clobbered; switch to the next remaining candidate.
    Survivor = Candidates.find_first();
  }
  // If we ran off the end, that's where we want to restore.
  if (MI == ME) RestorePointMI = ME;
  assert(RestorePointMI != StartMI &&
         "No available scavenger restore location!");

  // We ran out of candidates, so stop the search.
  UseMI = RestorePointMI;
  return Survivor;
}
370
371/// Given the bitvector \p Available of free register units at position
372/// \p From. Search backwards to find a register that is part of \p
373/// Candidates and not used/clobbered until the point \p To. If there is
374/// multiple candidates continue searching and pick the one that is not used/
375/// clobbered for the longest time.
376/// Returns the register and the earliest position we know it to be free or
377/// the position MBB.end() if no register is available.
378static std::pair<MCPhysReg, MachineBasicBlock::iterator>
379findSurvivorBackwards(const MachineRegisterInfo &MRI,
380    MachineBasicBlock::iterator From, MachineBasicBlock::iterator To,
381    const LiveRegUnits &LiveOut, ArrayRef<MCPhysReg> AllocationOrder,
382    bool RestoreAfter) {
383  bool FoundTo = false;
384  MCPhysReg Survivor = 0;
385  MachineBasicBlock::iterator Pos;
386  MachineBasicBlock &MBB = *From->getParent();
387  unsigned InstrLimit = 25;
388  unsigned InstrCountDown = InstrLimit;
389  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
390  LiveRegUnits Used(TRI);
391
392  for (MachineBasicBlock::iterator I = From;; --I) {
393    const MachineInstr &MI = *I;
394
395    Used.accumulate(MI);
396
397    if (I == To) {
398      // See if one of the registers in RC wasn't used so far.
399      for (MCPhysReg Reg : AllocationOrder) {
400        if (!MRI.isReserved(Reg) && Used.available(Reg) &&
401            LiveOut.available(Reg))
402          return std::make_pair(Reg, MBB.end());
403      }
404      // Otherwise we will continue up to InstrLimit instructions to find
405      // the register which is not defined/used for the longest time.
406      FoundTo = true;
407      Pos = To;
408      // Note: It was fine so far to start our search at From, however now that
409      // we have to spill, and can only place the restore after From then
410      // add the regs used/defed by std::next(From) to the set.
411      if (RestoreAfter)
412        Used.accumulate(*std::next(From));
413    }
414    if (FoundTo) {
415      if (Survivor == 0 || !Used.available(Survivor)) {
416        MCPhysReg AvilableReg = 0;
417        for (MCPhysReg Reg : AllocationOrder) {
418          if (!MRI.isReserved(Reg) && Used.available(Reg)) {
419            AvilableReg = Reg;
420            break;
421          }
422        }
423        if (AvilableReg == 0)
424          break;
425        Survivor = AvilableReg;
426      }
427      if (--InstrCountDown == 0)
428        break;
429
430      // Keep searching when we find a vreg since the spilled register will
431      // be usefull for this other vreg as well later.
432      bool FoundVReg = false;
433      for (const MachineOperand &MO : MI.operands()) {
434        if (MO.isReg() && Register::isVirtualRegister(MO.getReg())) {
435          FoundVReg = true;
436          break;
437        }
438      }
439      if (FoundVReg) {
440        InstrCountDown = InstrLimit;
441        Pos = I;
442      }
443      if (I == MBB.begin())
444        break;
445    }
446  }
447
448  return std::make_pair(Survivor, Pos);
449}
450
451static unsigned getFrameIndexOperandNum(MachineInstr &MI) {
452  unsigned i = 0;
453  while (!MI.getOperand(i).isFI()) {
454    ++i;
455    assert(i < MI.getNumOperands() && "Instr doesn't have FrameIndex operand!");
456  }
457  return i;
458}
459
/// Make \p Reg available for scavenging by spilling it: either via the
/// target's custom save/restore hook or to an emergency spill slot. The
/// store is inserted before \p Before and the reload before \p UseMI.
/// Returns the ScavengedInfo entry tracking the reservation.
RegScavenger::ScavengedInfo &
RegScavenger::spill(Register Reg, const TargetRegisterClass &RC, int SPAdj,
                    MachineBasicBlock::iterator Before,
                    MachineBasicBlock::iterator &UseMI) {
  // Find an available scavenging slot with size and alignment matching
  // the requirements of the class RC.
  const MachineFunction &MF = *Before->getMF();
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  unsigned NeedSize = TRI->getSpillSize(RC);
  unsigned NeedAlign = TRI->getSpillAlignment(RC);

  // SI == Scavenged.size() signals "no usable slot found".
  unsigned SI = Scavenged.size(), Diff = std::numeric_limits<unsigned>::max();
  int FIB = MFI.getObjectIndexBegin(), FIE = MFI.getObjectIndexEnd();
  for (unsigned I = 0; I < Scavenged.size(); ++I) {
    if (Scavenged[I].Reg != 0)
      continue;
    // Verify that this slot is valid for this register.
    int FI = Scavenged[I].FrameIndex;
    if (FI < FIB || FI >= FIE)
      continue;
    unsigned S = MFI.getObjectSize(FI);
    unsigned A = MFI.getObjectAlignment(FI);
    if (NeedSize > S || NeedAlign > A)
      continue;
    // Avoid wasting slots with large size and/or large alignment. Pick one
    // that is the best fit for this register class (in street metric).
    // Picking a larger slot than necessary could happen if a slot for a
    // larger register is reserved before a slot for a smaller one. When
    // trying to spill a smaller register, the large slot would be found
    // first, thus making it impossible to spill the larger register later.
    unsigned D = (S-NeedSize) + (A-NeedAlign);
    if (D < Diff) {
      SI = I;
      Diff = D;
    }
  }

  if (SI == Scavenged.size()) {
    // We need to scavenge a register but have no spill slot, the target
    // must know how to do it (if not, we'll assert below).
    // FIE is an intentionally-invalid frame index placeholder.
    Scavenged.push_back(ScavengedInfo(FIE));
  }

  // Avoid infinite regress
  Scavenged[SI].Reg = Reg;

  // If the target knows how to save/restore the register, let it do so;
  // otherwise, use the emergency stack spill slot.
  if (!TRI->saveScavengerRegister(*MBB, Before, UseMI, &RC, Reg)) {
    // Spill the scavenged register before \p Before.
    int FI = Scavenged[SI].FrameIndex;
    if (FI < FIB || FI >= FIE) {
      std::string Msg = std::string("Error while trying to spill ") +
          TRI->getName(Reg) + " from class " + TRI->getRegClassName(&RC) +
          ": Cannot scavenge register without an emergency spill slot!";
      report_fatal_error(Msg.c_str());
    }
    TII->storeRegToStackSlot(*MBB, Before, Reg, true, Scavenged[SI].FrameIndex,
                             &RC, TRI);
    MachineBasicBlock::iterator II = std::prev(Before);

    // Rewrite the frame index in the just-inserted store.
    unsigned FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);

    // Restore the scavenged register before its use (or first terminator).
    TII->loadRegFromStackSlot(*MBB, UseMI, Reg, Scavenged[SI].FrameIndex,
                              &RC, TRI);
    II = std::prev(UseMI);

    // Rewrite the frame index in the just-inserted reload as well.
    FIOperandNum = getFrameIndexOperandNum(*II);
    TRI->eliminateFrameIndex(II, SPAdj, FIOperandNum, this);
  }
  return Scavenged[SI];
}
534
535Register RegScavenger::scavengeRegister(const TargetRegisterClass *RC,
536                                        MachineBasicBlock::iterator I,
537                                        int SPAdj, bool AllowSpill) {
538  MachineInstr &MI = *I;
539  const MachineFunction &MF = *MI.getMF();
540  // Consider all allocatable registers in the register class initially
541  BitVector Candidates = TRI->getAllocatableSet(MF, RC);
542
543  // Exclude all the registers being used by the instruction.
544  for (const MachineOperand &MO : MI.operands()) {
545    if (MO.isReg() && MO.getReg() != 0 && !(MO.isUse() && MO.isUndef()) &&
546        !Register::isVirtualRegister(MO.getReg()))
547      for (MCRegAliasIterator AI(MO.getReg(), TRI, true); AI.isValid(); ++AI)
548        Candidates.reset(*AI);
549  }
550
551  // Try to find a register that's unused if there is one, as then we won't
552  // have to spill.
553  BitVector Available = getRegsAvailable(RC);
554  Available &= Candidates;
555  if (Available.any())
556    Candidates = Available;
557
558  // Find the register whose use is furthest away.
559  MachineBasicBlock::iterator UseMI;
560  Register SReg = findSurvivorReg(I, Candidates, 25, UseMI);
561
562  // If we found an unused register there is no reason to spill it.
563  if (!isRegUsed(SReg)) {
564    LLVM_DEBUG(dbgs() << "Scavenged register: " << printReg(SReg, TRI) << "\n");
565    return SReg;
566  }
567
568  if (!AllowSpill)
569    return 0;
570
571  ScavengedInfo &Scavenged = spill(SReg, *RC, SPAdj, I, UseMI);
572  Scavenged.Restore = &*std::prev(UseMI);
573
574  LLVM_DEBUG(dbgs() << "Scavenged register (with spill): "
575                    << printReg(SReg, TRI) << "\n");
576
577  return SReg;
578}
579
580Register RegScavenger::scavengeRegisterBackwards(const TargetRegisterClass &RC,
581                                                 MachineBasicBlock::iterator To,
582                                                 bool RestoreAfter, int SPAdj,
583                                                 bool AllowSpill) {
584  const MachineBasicBlock &MBB = *To->getParent();
585  const MachineFunction &MF = *MBB.getParent();
586
587  // Find the register whose use is furthest away.
588  MachineBasicBlock::iterator UseMI;
589  ArrayRef<MCPhysReg> AllocationOrder = RC.getRawAllocationOrder(MF);
590  std::pair<MCPhysReg, MachineBasicBlock::iterator> P =
591      findSurvivorBackwards(*MRI, MBBI, To, LiveUnits, AllocationOrder,
592                            RestoreAfter);
593  MCPhysReg Reg = P.first;
594  MachineBasicBlock::iterator SpillBefore = P.second;
595  assert(Reg != 0 && "No register left to scavenge!");
596  // Found an available register?
597  if (SpillBefore == MBB.end()) {
598    LLVM_DEBUG(dbgs() << "Scavenged free register: " << printReg(Reg, TRI)
599               << '\n');
600    return Reg;
601  }
602
603  if (!AllowSpill)
604    return 0;
605
606  MachineBasicBlock::iterator ReloadAfter =
607    RestoreAfter ? std::next(MBBI) : MBBI;
608  MachineBasicBlock::iterator ReloadBefore = std::next(ReloadAfter);
609  if (ReloadBefore != MBB.end())
610    LLVM_DEBUG(dbgs() << "Reload before: " << *ReloadBefore << '\n');
611  ScavengedInfo &Scavenged = spill(Reg, RC, SPAdj, SpillBefore, ReloadBefore);
612  Scavenged.Restore = &*std::prev(SpillBefore);
613  LiveUnits.removeReg(Reg);
614  LLVM_DEBUG(dbgs() << "Scavenged register with spill: " << printReg(Reg, TRI)
615             << " until " << *SpillBefore);
616  return Reg;
617}
618
619/// Allocate a register for the virtual register \p VReg. The last use of
620/// \p VReg is around the current position of the register scavenger \p RS.
621/// \p ReserveAfter controls whether the scavenged register needs to be reserved
622/// after the current instruction, otherwise it will only be reserved before the
623/// current instruction.
624static Register scavengeVReg(MachineRegisterInfo &MRI, RegScavenger &RS,
625                             Register VReg, bool ReserveAfter) {
626  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
627#ifndef NDEBUG
628  // Verify that all definitions and uses are in the same basic block.
629  const MachineBasicBlock *CommonMBB = nullptr;
630  // Real definition for the reg, re-definitions are not considered.
631  const MachineInstr *RealDef = nullptr;
632  for (MachineOperand &MO : MRI.reg_nodbg_operands(VReg)) {
633    MachineBasicBlock *MBB = MO.getParent()->getParent();
634    if (CommonMBB == nullptr)
635      CommonMBB = MBB;
636    assert(MBB == CommonMBB && "All defs+uses must be in the same basic block");
637    if (MO.isDef()) {
638      const MachineInstr &MI = *MO.getParent();
639      if (!MI.readsRegister(VReg, &TRI)) {
640        assert((!RealDef || RealDef == &MI) &&
641               "Can have at most one definition which is not a redefinition");
642        RealDef = &MI;
643      }
644    }
645  }
646  assert(RealDef != nullptr && "Must have at least 1 Def");
647#endif
648
649  // We should only have one definition of the register. However to accommodate
650  // the requirements of two address code we also allow definitions in
651  // subsequent instructions provided they also read the register. That way
652  // we get a single contiguous lifetime.
653  //
654  // Definitions in MRI.def_begin() are unordered, search for the first.
655  MachineRegisterInfo::def_iterator FirstDef =
656    std::find_if(MRI.def_begin(VReg), MRI.def_end(),
657                 [VReg, &TRI](const MachineOperand &MO) {
658      return !MO.getParent()->readsRegister(VReg, &TRI);
659    });
660  assert(FirstDef != MRI.def_end() &&
661         "Must have one definition that does not redefine vreg");
662  MachineInstr &DefMI = *FirstDef->getParent();
663
664  // The register scavenger will report a free register inserting an emergency
665  // spill/reload if necessary.
666  int SPAdj = 0;
667  const TargetRegisterClass &RC = *MRI.getRegClass(VReg);
668  Register SReg = RS.scavengeRegisterBackwards(RC, DefMI.getIterator(),
669                                               ReserveAfter, SPAdj);
670  MRI.replaceRegWith(VReg, SReg);
671  ++NumScavengedRegs;
672  return SReg;
673}
674
/// Allocate (scavenge) vregs inside a single basic block.
/// Returns true if the target spill callback created new vregs and a 2nd pass
/// is necessary.
static bool scavengeFrameVirtualRegsInBlock(MachineRegisterInfo &MRI,
                                            RegScavenger &RS,
                                            MachineBasicBlock &MBB) {
  const TargetRegisterInfo &TRI = *MRI.getTargetRegisterInfo();
  RS.enterBasicBlockEnd(MBB);

  // Vregs numbered >= InitialNumVirtRegs were created by target callbacks
  // during this run and are deliberately skipped (handled in a later pass).
  unsigned InitialNumVirtRegs = MRI.getNumVirtRegs();
  bool NextInstructionReadsVReg = false;
  for (MachineBasicBlock::iterator I = MBB.end(); I != MBB.begin(); ) {
    --I;
    // Move RegScavenger to the position between *I and *std::next(I).
    RS.backward(I);

    // Look for unassigned vregs in the uses of *std::next(I).
    if (NextInstructionReadsVReg) {
      MachineBasicBlock::iterator N = std::next(I);
      const MachineInstr &NMI = *N;
      for (const MachineOperand &MO : NMI.operands()) {
        if (!MO.isReg())
          continue;
        Register Reg = MO.getReg();
        // We only care about virtual registers and ignore virtual registers
        // created by the target callbacks in the process (those will be handled
        // in a scavenging round).
        if (!Register::isVirtualRegister(Reg) ||
            Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
          continue;
        if (!MO.readsReg())
          continue;

        // Assign a physical register and mark it killed at this use.
        Register SReg = scavengeVReg(MRI, RS, Reg, true);
        N->addRegisterKilled(SReg, &TRI, false);
        RS.setRegUsed(SReg);
      }
    }

    // Look for unassigned vregs in the defs of *I.
    NextInstructionReadsVReg = false;
    const MachineInstr &MI = *I;
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      // Only vregs, no newly created vregs (see above).
      if (!Register::isVirtualRegister(Reg) ||
          Register::virtReg2Index(Reg) >= InitialNumVirtRegs)
        continue;
      // We have to look at all operands anyway so we can precalculate here
      // whether there is a reading operand. This allows use to skip the use
      // step in the next iteration if there was none.
      assert(!MO.isInternalRead() && "Cannot assign inside bundles");
      assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
      if (MO.readsReg()) {
        NextInstructionReadsVReg = true;
      }
      if (MO.isDef()) {
        // Assign a physical register and mark it dead at this def.
        Register SReg = scavengeVReg(MRI, RS, Reg, false);
        I->addRegisterDead(SReg, &TRI, false);
      }
    }
  }
#ifndef NDEBUG
  // The first instruction may not read a vreg: there is nowhere before it to
  // place a reload.
  for (const MachineOperand &MO : MBB.front().operands()) {
    if (!MO.isReg() || !Register::isVirtualRegister(MO.getReg()))
      continue;
    assert(!MO.isInternalRead() && "Cannot assign inside bundles");
    assert((!MO.isUndef() || MO.isDef()) && "Cannot handle undef uses");
    assert(!MO.readsReg() && "Vreg use in first instruction not allowed");
  }
#endif

  return MRI.getNumVirtRegs() != InitialNumVirtRegs;
}
751
752void llvm::scavengeFrameVirtualRegs(MachineFunction &MF, RegScavenger &RS) {
753  // FIXME: Iterating over the instruction stream is unnecessary. We can simply
754  // iterate over the vreg use list, which at this point only contains machine
755  // operands for which eliminateFrameIndex need a new scratch reg.
756  MachineRegisterInfo &MRI = MF.getRegInfo();
757  // Shortcut.
758  if (MRI.getNumVirtRegs() == 0) {
759    MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
760    return;
761  }
762
763  // Run through the instructions and find any virtual registers.
764  for (MachineBasicBlock &MBB : MF) {
765    if (MBB.empty())
766      continue;
767
768    bool Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
769    if (Again) {
770      LLVM_DEBUG(dbgs() << "Warning: Required two scavenging passes for block "
771                        << MBB.getName() << '\n');
772      Again = scavengeFrameVirtualRegsInBlock(MRI, RS, MBB);
773      // The target required a 2nd run (because it created new vregs while
774      // spilling). Refuse to do another pass to keep compiletime in check.
775      if (Again)
776        report_fatal_error("Incomplete scavenging after 2nd pass");
777    }
778  }
779
780  MRI.clearVirtRegs();
781  MF.getProperties().set(MachineFunctionProperties::Property::NoVRegs);
782}
783
namespace {

/// This class runs register scavenging independently of the
/// PrologEpilogInserter. It is used for testing.
class ScavengerTest : public MachineFunctionPass {
public:
  static char ID;

  ScavengerTest() : MachineFunctionPass(ID) {}

  bool runOnMachineFunction(MachineFunction &MF) override {
    const TargetSubtargetInfo &STI = MF.getSubtarget();
    const TargetFrameLowering &TFL = *STI.getFrameLowering();

    RegScavenger RS;
    // Let's hope that calling those outside of PrologEpilogueInserter works
    // well enough to initialize the scavenger with some emergency spillslots
    // for the target.
    BitVector SavedRegs;
    TFL.determineCalleeSaves(MF, SavedRegs, &RS);
    TFL.processFunctionBeforeFrameFinalized(MF, &RS);

    // Let's scavenge the current function
    scavengeFrameVirtualRegs(MF, RS);
    return true;
  }
};

} // end anonymous namespace
813
char ScavengerTest::ID;

// Register the testing pass so it can be invoked as -scavenger-test.
INITIALIZE_PASS(ScavengerTest, "scavenger-test",
                "Scavenge virtual registers inside basic blocks", false, false)
818