//===- InterferenceCache.cpp - Caching per-block interference ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// InterferenceCache remembers per-block interference in LiveIntervalUnions.
//
//===----------------------------------------------------------------------===//

#include "InterferenceCache.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/CodeGen/LiveIntervals.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Support/ErrorHandling.h"
#include <cassert>
#include <cstdint>
#include <tuple>

using namespace llvm;

#define DEBUG_TYPE "regalloc"

// Static member used for null interference cursors.
const InterferenceCache::BlockInterference
    InterferenceCache::Cursor::NoInterference;
// Initializes PhysRegEntries (instead of a SmallVector, PhysRegEntries is a
// buffer of size NumPhysRegs to speed up alloc/clear for targets with large
// reg files). Calloc'ed memory is used for good form, and it quiets tools like
// Valgrind, but zero-initialized memory is not required by the algorithm:
// this is because PhysRegEntries works like a SparseSet and its entries are
// only valid when there is a corresponding CacheEntries assignment. There is
// also support for when pass managers are reused for targets with different
// numbers of PhysRegs: in this case PhysRegEntries is freed and reinitialized.
void InterferenceCache::reinitPhysRegEntries() {
  if (PhysRegEntriesCount == TRI->getNumRegs()) return;
  free(PhysRegEntries);
  PhysRegEntriesCount = TRI->getNumRegs();
  PhysRegEntries = static_cast<unsigned char*>(
      safe_calloc(PhysRegEntriesCount, sizeof(unsigned char)));
}

void InterferenceCache::init(MachineFunction *mf,
                             LiveIntervalUnion *liuarray,
                             SlotIndexes *indexes,
                             LiveIntervals *lis,
                             const TargetRegisterInfo *tri) {
  MF = mf;
  LIUArray = liuarray;
  TRI = tri;
  reinitPhysRegEntries();
  for (Entry &E : Entries)
    E.clear(mf, indexes, lis);
}

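// Look up or create the cache entry for PhysReg. A matching entry is
// revalidated if its LiveIntervalUnion tags are out of date; otherwise a free
// entry (one with no active cursors) is claimed in round-robin order.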
InterferenceCache::Entry *InterferenceCache::get(MCRegister PhysReg) {
  unsigned char E = PhysRegEntries[PhysReg.id()];
  if (E < CacheEntries && Entries[E].getPhysReg() == PhysReg) {
    if (!Entries[E].valid(LIUArray, TRI))
      Entries[E].revalidate(LIUArray, TRI);
    return &Entries[E];
  }
  // No valid entry exists, pick the next round-robin entry.
  E = RoundRobin;
  if (++RoundRobin == CacheEntries)
    RoundRobin = 0;
  for (unsigned i = 0; i != CacheEntries; ++i) {
    // Skip entries that are in use.
    if (Entries[E].hasRefs()) {
      if (++E == CacheEntries)
        E = 0;
      continue;
    }
    Entries[E].reset(PhysReg, LIUArray, TRI, MF);
    PhysRegEntries[PhysReg] = E;
    return &Entries[E];
  }
  llvm_unreachable("Ran out of interference cache entries.");
}

/// revalidate - LIU contents have changed, update tags.
void InterferenceCache::Entry::revalidate(LiveIntervalUnion *LIUArray,
                                          const TargetRegisterInfo *TRI) {
  // Invalidate all block entries.
  ++Tag;
  // Invalidate all iterators.
  PrevPos = SlotIndex();
  unsigned i = 0;
  for (MCRegUnit Unit : TRI->regunits(PhysReg))
    RegUnits[i++].VirtTag = LIUArray[Unit].getTag();
}

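// Bind this entry to a new physical register: invalidate all cached block
// info and rebuild RegUnits with one slot per register unit of PhysReg,
// tracking the unit's LiveIntervalUnion as well as its fixed live range.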
void InterferenceCache::Entry::reset(MCRegister physReg,
                                     LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI,
                                     const MachineFunction *MF) {
  assert(!hasRefs() && "Cannot reset cache entry with references");
  // LIUs changed, invalidate cache.
  ++Tag;
  PhysReg = physReg;
  Blocks.resize(MF->getNumBlockIDs());

  // Reset iterators.
  PrevPos = SlotIndex();
  RegUnits.clear();
  for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
    RegUnits.push_back(LIUArray[Unit]);
    RegUnits.back().Fixed = &LIS->getRegUnit(Unit);
  }
}

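// Return true if the entry is still valid for PhysReg: it must track the same
// set of register units, and none of their LiveIntervalUnions may have changed
// since the tags were recorded.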
bool InterferenceCache::Entry::valid(LiveIntervalUnion *LIUArray,
                                     const TargetRegisterInfo *TRI) {
  unsigned i = 0, e = RegUnits.size();
  for (MCRegUnit Unit : TRI->regunits(PhysReg)) {
    if (i == e)
      return false;
    if (LIUArray[Unit].changedSince(RegUnits[i].VirtTag))
      return false;
    ++i;
  }
  return i == e;
}

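// Compute the interference summary for block MBBNum: the first and last
// interfering slot indexes from virtual register interference, fixed (reserved
// register unit) interference, and register mask clobbers. If the requested
// block has no interference, following blocks are precomputed as well until
// one with interference is found.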
void InterferenceCache::Entry::update(unsigned MBBNum) {
  SlotIndex Start, Stop;
  std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);

  // Use advanceTo only when possible.
  if (PrevPos != Start) {
    if (!PrevPos.isValid() || Start < PrevPos) {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.find(Start);
        RUI.FixedI = RUI.Fixed->find(Start);
      }
    } else {
      for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
        RegUnitInfo &RUI = RegUnits[i];
        RUI.VirtI.advanceTo(Start);
        if (RUI.FixedI != RUI.Fixed->end())
          RUI.FixedI = RUI.Fixed->advanceTo(RUI.FixedI, Start);
      }
    }
    PrevPos = Start;
  }

  MachineFunction::const_iterator MFI =
      MF->getBlockNumbered(MBBNum)->getIterator();
  BlockInterference *BI = &Blocks[MBBNum];
  ArrayRef<SlotIndex> RegMaskSlots;
  ArrayRef<const uint32_t*> RegMaskBits;
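  // Scan blocks starting at MBBNum. Blocks without interference are cached
  // and the scan continues into the following blocks; the loop stops at the
  // first block with interference, at an already up-to-date cache entry, or
  // at the end of the function.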
  while (true) {
    BI->Tag = Tag;
    BI->First = BI->Last = SlotIndex();

    // Check for first interference from virtregs.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
      if (!I.valid())
        continue;
      SlotIndex StartI = I.start();
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Same thing for fixed interference.
    for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
      LiveInterval::const_iterator I = RegUnits[i].FixedI;
      LiveInterval::const_iterator E = RegUnits[i].Fixed->end();
      if (I == E)
        continue;
      SlotIndex StartI = I->start;
      if (StartI >= Stop)
        continue;
      if (!BI->First.isValid() || StartI < BI->First)
        BI->First = StartI;
    }

    // Also check for register mask interference.
    RegMaskSlots = LIS->getRegMaskSlotsInBlock(MBBNum);
    RegMaskBits = LIS->getRegMaskBitsInBlock(MBBNum);
    SlotIndex Limit = BI->First.isValid() ? BI->First : Stop;
    for (unsigned i = 0, e = RegMaskSlots.size();
         i != e && RegMaskSlots[i] < Limit; ++i)
      if (MachineOperand::clobbersPhysReg(RegMaskBits[i], PhysReg)) {
        // Register mask i clobbers PhysReg before the LIU interference.
        BI->First = RegMaskSlots[i];
        break;
      }

    PrevPos = Stop;
    if (BI->First.isValid())
      break;

    // No interference in this block? Go ahead and precompute the next block.
    if (++MFI == MF->end())
      return;
    MBBNum = MFI->getNumber();
    BI = &Blocks[MBBNum];
    if (BI->Tag == Tag)
      return;
    std::tie(Start, Stop) = Indexes->getMBBRange(MBBNum);
  }

  // Check for last interference in block.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveIntervalUnion::SegmentIter &I = RegUnits[i].VirtI;
    if (!I.valid() || I.start() >= Stop)
      continue;
    I.advanceTo(Stop);
    bool Backup = !I.valid() || I.start() >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I.stop();
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Fixed interference.
  for (unsigned i = 0, e = RegUnits.size(); i != e; ++i) {
    LiveInterval::iterator &I = RegUnits[i].FixedI;
    LiveRange *LR = RegUnits[i].Fixed;
    if (I == LR->end() || I->start >= Stop)
      continue;
    I = LR->advanceTo(I, Stop);
    bool Backup = I == LR->end() || I->start >= Stop;
    if (Backup)
      --I;
    SlotIndex StopI = I->end;
    if (!BI->Last.isValid() || StopI > BI->Last)
      BI->Last = StopI;
    if (Backup)
      ++I;
  }

  // Also check for register mask interference.
  SlotIndex Limit = BI->Last.isValid() ? BI->Last : Start;
  for (unsigned i = RegMaskSlots.size();
       i && RegMaskSlots[i-1].getDeadSlot() > Limit; --i)
    if (MachineOperand::clobbersPhysReg(RegMaskBits[i-1], PhysReg)) {
      // Register mask i-1 clobbers PhysReg after the LIU interference.
      // Model the regmask clobber as a dead def.
      BI->Last = RegMaskSlots[i-1].getDeadSlot();
      break;
    }
}