MachineIRBuilder.cpp revision 360784
//===-- llvm/CodeGen/GlobalISel/MachineIRBuilder.cpp - MIBuilder--*- C++ -*-==//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
/// \file
/// This file implements the MachineIRBuilder class.
//===----------------------------------------------------------------------===//
#include "llvm/CodeGen/GlobalISel/MachineIRBuilder.h"
#include "llvm/CodeGen/GlobalISel/GISelChangeObserver.h"

#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetOpcodes.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/DebugInfo.h"

using namespace llvm;

void MachineIRBuilder::setMF(MachineFunction &MF) {
  State.MF = &MF;
  State.MBB = nullptr;
  State.MRI = &MF.getRegInfo();
  State.TII = MF.getSubtarget().getInstrInfo();
  State.DL = DebugLoc();
  State.II = MachineBasicBlock::iterator();
  State.Observer = nullptr;
}

void MachineIRBuilder::setMBB(MachineBasicBlock &MBB) {
  State.MBB = &MBB;
  State.II = MBB.end();
  assert(&getMF() == MBB.getParent() &&
         "Basic block is in a different function");
}

void MachineIRBuilder::setInstr(MachineInstr &MI) {
  assert(MI.getParent() && "Instruction is not part of a basic block");
  setMBB(*MI.getParent());
  State.II = MI.getIterator();
}

void MachineIRBuilder::setCSEInfo(GISelCSEInfo *Info) { State.CSEInfo = Info; }

void MachineIRBuilder::setInsertPt(MachineBasicBlock &MBB,
                                   MachineBasicBlock::iterator II) {
  assert(MBB.getParent() == &getMF() &&
         "Basic block is in a different function");
  State.MBB = &MBB;
  State.II = II;
}
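
// Example (illustrative sketch, not part of the original file): a typical
// client binds the builder to a function and chooses an insertion point
// before emitting anything. MF, MBB, and MI are assumed to come from the
// caller's context.
//
//   MachineIRBuilder MIRBuilder;
//   MIRBuilder.setMF(MF);    // resets the MRI/TII/DebugLoc/observer state
//   MIRBuilder.setMBB(MBB);  // append at the end of MBB, or...
//   MIRBuilder.setInstr(MI); // ...insert immediately before MI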

void MachineIRBuilder::recordInsertion(MachineInstr *InsertedInstr) const {
  if (State.Observer)
    State.Observer->createdInstr(*InsertedInstr);
}

void MachineIRBuilder::setChangeObserver(GISelChangeObserver &Observer) {
  State.Observer = &Observer;
}

void MachineIRBuilder::stopObservingChanges() { State.Observer = nullptr; }

//------------------------------------------------------------------------------
// Build instruction variants.
//------------------------------------------------------------------------------

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opcode) {
  return insertInstr(buildInstrNoInsert(Opcode));
}

MachineInstrBuilder MachineIRBuilder::buildInstrNoInsert(unsigned Opcode) {
  MachineInstrBuilder MIB = BuildMI(getMF(), getDL(), getTII().get(Opcode));
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::insertInstr(MachineInstrBuilder MIB) {
  getMBB().insert(getInsertPt(), MIB);
  recordInsertion(MIB);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildDirectDbgValue(Register Reg, const MDNode *Variable,
                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ false, Reg, Variable, Expr));
}

MachineInstrBuilder
MachineIRBuilder::buildIndirectDbgValue(Register Reg, const MDNode *Variable,
                                        const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return insertInstr(BuildMI(getMF(), getDL(),
                             getTII().get(TargetOpcode::DBG_VALUE),
                             /*IsIndirect*/ true, Reg, Variable, Expr));
}

MachineInstrBuilder MachineIRBuilder::buildFIDbgValue(int FI,
                                                      const MDNode *Variable,
                                                      const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  return buildInstr(TargetOpcode::DBG_VALUE)
      .addFrameIndex(FI)
      .addImm(0)
      .addMetadata(Variable)
      .addMetadata(Expr);
}

MachineInstrBuilder MachineIRBuilder::buildConstDbgValue(const Constant &C,
                                                         const MDNode *Variable,
                                                         const MDNode *Expr) {
  assert(isa<DILocalVariable>(Variable) && "not a variable");
  assert(cast<DIExpression>(Expr)->isValid() && "not an expression");
  assert(
      cast<DILocalVariable>(Variable)->isValidLocationForIntrinsic(getDL()) &&
      "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_VALUE);
  if (auto *CI = dyn_cast<ConstantInt>(&C)) {
    if (CI->getBitWidth() > 64)
      MIB.addCImm(CI);
    else
      MIB.addImm(CI->getZExtValue());
  } else if (auto *CFP = dyn_cast<ConstantFP>(&C)) {
    MIB.addFPImm(CFP);
  } else {
    // Insert %noreg if we didn't find a usable constant and had to drop it.
    MIB.addReg(0U);
  }

  return MIB.addImm(0).addMetadata(Variable).addMetadata(Expr);
}
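
// Example (illustrative sketch; DL, VReg, DIVar, and DIExpr are assumed from
// the surrounding lowering code): attaching a DBG_VALUE that describes the
// value held in a virtual register.
//
//   MIRBuilder.setDebugLoc(DL);
//   MIRBuilder.buildDirectDbgValue(VReg, DIVar, DIExpr);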

MachineInstrBuilder MachineIRBuilder::buildDbgLabel(const MDNode *Label) {
  assert(isa<DILabel>(Label) && "not a label");
  assert(cast<DILabel>(Label)->isValidLocationForIntrinsic(State.DL) &&
         "Expected inlined-at fields to agree");
  auto MIB = buildInstr(TargetOpcode::DBG_LABEL);

  return MIB.addMetadata(Label);
}

MachineInstrBuilder MachineIRBuilder::buildDynStackAlloc(const DstOp &Res,
                                                         const SrcOp &Size,
                                                         unsigned Align) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "expected ptr dst type");
  auto MIB = buildInstr(TargetOpcode::G_DYN_STACKALLOC);
  Res.addDefToMIB(*getMRI(), MIB);
  Size.addSrcToMIB(MIB);
  MIB.addImm(Align);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildFrameIndex(const DstOp &Res,
                                                      int Idx) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  auto MIB = buildInstr(TargetOpcode::G_FRAME_INDEX);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addFrameIndex(Idx);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildGlobalValue(const DstOp &Res,
                                                       const GlobalValue *GV) {
  assert(Res.getLLTTy(*getMRI()).isPointer() && "invalid operand type");
  assert(Res.getLLTTy(*getMRI()).getAddressSpace() ==
             GV->getType()->getAddressSpace() &&
         "address space mismatch");

  auto MIB = buildInstr(TargetOpcode::G_GLOBAL_VALUE);
  Res.addDefToMIB(*getMRI(), MIB);
  MIB.addGlobalAddress(GV);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildJumpTable(const LLT PtrTy,
                                                     unsigned JTI) {
  return buildInstr(TargetOpcode::G_JUMP_TABLE, {PtrTy}, {})
      .addJumpTableIndex(JTI);
}

void MachineIRBuilder::validateBinaryOp(const LLT &Res, const LLT &Op0,
                                        const LLT &Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0 && Res == Op1) && "type mismatch");
}

void MachineIRBuilder::validateShiftOp(const LLT &Res, const LLT &Op0,
                                       const LLT &Op1) {
  assert((Res.isScalar() || Res.isVector()) && "invalid operand type");
  assert((Res == Op0) && "type mismatch");
}

MachineInstrBuilder MachineIRBuilder::buildPtrAdd(const DstOp &Res,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1) {
  assert(Res.getLLTTy(*getMRI()).isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");
  assert(Op1.getLLTTy(*getMRI()).isScalar() && "invalid offset type");

  return buildInstr(TargetOpcode::G_PTR_ADD, {Res}, {Op0, Op1});
}

Optional<MachineInstrBuilder>
MachineIRBuilder::materializePtrAdd(Register &Res, Register Op0,
                                    const LLT &ValueTy, uint64_t Value) {
  assert(Res == 0 && "Res is a result argument");
  assert(ValueTy.isScalar() && "invalid offset type");

  if (Value == 0) {
    Res = Op0;
    return None;
  }

  Res = getMRI()->createGenericVirtualRegister(getMRI()->getType(Op0));
  auto Cst = buildConstant(ValueTy, Value);
  return buildPtrAdd(Res, Op0, Cst.getReg(0));
}
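
// Example (illustrative sketch; Base, OffsetTy, and Offset are assumed from
// caller context): materializePtrAdd avoids emitting a no-op G_PTR_ADD for a
// zero offset. Res must come in as the invalid register (0).
//
//   Register NewPtr; // default-constructed, i.e. no register yet
//   if (auto PtrAdd = MIRBuilder.materializePtrAdd(NewPtr, Base, OffsetTy,
//                                                  Offset))
//     ; // NewPtr is defined by a fresh G_PTR_ADD
//   else
//     ; // Offset was 0: NewPtr aliases Base and nothing was emitted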

MachineInstrBuilder MachineIRBuilder::buildPtrMask(const DstOp &Res,
                                                   const SrcOp &Op0,
                                                   uint32_t NumBits) {
  assert(Res.getLLTTy(*getMRI()).isPointer() &&
         Res.getLLTTy(*getMRI()) == Op0.getLLTTy(*getMRI()) && "type mismatch");

  auto MIB = buildInstr(TargetOpcode::G_PTR_MASK);
  Res.addDefToMIB(*getMRI(), MIB);
  Op0.addSrcToMIB(MIB);
  MIB.addImm(NumBits);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildBr(MachineBasicBlock &Dest) {
  return buildInstr(TargetOpcode::G_BR).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildBrIndirect(Register Tgt) {
  assert(getMRI()->getType(Tgt).isPointer() && "invalid branch destination");
  return buildInstr(TargetOpcode::G_BRINDIRECT).addUse(Tgt);
}

MachineInstrBuilder MachineIRBuilder::buildBrJT(Register TablePtr,
                                                unsigned JTI,
                                                Register IndexReg) {
  assert(getMRI()->getType(TablePtr).isPointer() &&
         "Table reg must be a pointer");
  return buildInstr(TargetOpcode::G_BRJT)
      .addUse(TablePtr)
      .addJumpTableIndex(JTI)
      .addUse(IndexReg);
}

MachineInstrBuilder MachineIRBuilder::buildCopy(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::COPY, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const ConstantInt &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();
  assert(EltTy.getScalarSizeInBits() == Val.getBitWidth() &&
         "creating constant with the wrong size");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_CONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addCImm(&Val);
    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_CONSTANT);
  Res.addDefToMIB(*getMRI(), Const);
  Const.addCImm(&Val);
  return Const;
}
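
// Example (illustrative sketch): asking for a vector-typed constant splats a
// scalar G_CONSTANT through G_BUILD_VECTOR, so both forms below are valid.
//
//   auto C = MIRBuilder.buildConstant(LLT::scalar(32), 42);
//   auto V = MIRBuilder.buildConstant(LLT::vector(4, 32), 42);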

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    int64_t Val) {
  auto IntN = IntegerType::get(getMF().getFunction().getContext(),
                               Res.getLLTTy(*getMRI()).getScalarSizeInBits());
  ConstantInt *CI = ConstantInt::get(IntN, Val, true);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const ConstantFP &Val) {
  LLT Ty = Res.getLLTTy(*getMRI());
  LLT EltTy = Ty.getScalarType();

  assert(APFloat::getSizeInBits(Val.getValueAPF().getSemantics())
         == EltTy.getSizeInBits() &&
         "creating fconstant with the wrong size");

  assert(!Ty.isPointer() && "invalid operand type");

  if (Ty.isVector()) {
    auto Const = buildInstr(TargetOpcode::G_FCONSTANT)
                     .addDef(getMRI()->createGenericVirtualRegister(EltTy))
                     .addFPImm(&Val);

    return buildSplatVector(Res, Const);
  }

  auto Const = buildInstr(TargetOpcode::G_FCONSTANT);
  Res.addDefToMIB(*getMRI(), Const);
  Const.addFPImm(&Val);
  return Const;
}

MachineInstrBuilder MachineIRBuilder::buildConstant(const DstOp &Res,
                                                    const APInt &Val) {
  ConstantInt *CI = ConstantInt::get(getMF().getFunction().getContext(), Val);
  return buildConstant(Res, *CI);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     double Val) {
  LLT DstTy = Res.getLLTTy(*getMRI());
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP =
      ConstantFP::get(Ctx, getAPFloatFromSize(Val, DstTy.getScalarSizeInBits()));
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildFConstant(const DstOp &Res,
                                                     const APFloat &Val) {
  auto &Ctx = getMF().getFunction().getContext();
  auto *CFP = ConstantFP::get(Ctx, Val);
  return buildFConstant(Res, *CFP);
}

MachineInstrBuilder MachineIRBuilder::buildBrCond(Register Tst,
                                                  MachineBasicBlock &Dest) {
  assert(getMRI()->getType(Tst).isScalar() && "invalid operand type");

  return buildInstr(TargetOpcode::G_BRCOND).addUse(Tst).addMBB(&Dest);
}

MachineInstrBuilder MachineIRBuilder::buildLoad(const DstOp &Res,
                                                const SrcOp &Addr,
                                                MachineMemOperand &MMO) {
  return buildLoadInstr(TargetOpcode::G_LOAD, Res, Addr, MMO);
}

MachineInstrBuilder MachineIRBuilder::buildLoadInstr(unsigned Opcode,
                                                     const DstOp &Res,
                                                     const SrcOp &Addr,
                                                     MachineMemOperand &MMO) {
  assert(Res.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(Opcode);
  Res.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildStore(const SrcOp &Val,
                                                 const SrcOp &Addr,
                                                 MachineMemOperand &MMO) {
  assert(Val.getLLTTy(*getMRI()).isValid() && "invalid operand type");
  assert(Addr.getLLTTy(*getMRI()).isPointer() && "invalid operand type");

  auto MIB = buildInstr(TargetOpcode::G_STORE);
  Val.addSrcToMIB(MIB);
  Addr.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildUAddo(const DstOp &Res,
                                                 const DstOp &CarryOut,
                                                 const SrcOp &Op0,
                                                 const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_UADDO, {Res, CarryOut}, {Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildUAdde(const DstOp &Res,
                                                 const DstOp &CarryOut,
                                                 const SrcOp &Op0,
                                                 const SrcOp &Op1,
                                                 const SrcOp &CarryIn) {
  return buildInstr(TargetOpcode::G_UADDE, {Res, CarryOut},
                    {Op0, Op1, CarryIn});
}

MachineInstrBuilder MachineIRBuilder::buildAnyExt(const DstOp &Res,
                                                  const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildSExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExt(const DstOp &Res,
                                                const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_ZEXT, Res, Op);
}

unsigned MachineIRBuilder::getBoolExtOp(bool IsVec, bool IsFP) const {
  const auto *TLI = getMF().getSubtarget().getTargetLowering();
  switch (TLI->getBooleanContents(IsVec, IsFP)) {
  case TargetLoweringBase::ZeroOrNegativeOneBooleanContent:
    return TargetOpcode::G_SEXT;
  case TargetLoweringBase::ZeroOrOneBooleanContent:
    return TargetOpcode::G_ZEXT;
  default:
    return TargetOpcode::G_ANYEXT;
  }
}
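
// Note on the mapping above: a target reporting
// ZeroOrNegativeOneBooleanContent stores true as an all-ones mask, so a bool
// must be widened with G_SEXT to stay all-ones; ZeroOrOneBooleanContent
// targets widen with G_ZEXT; any other answer leaves the high bits
// unspecified, hence G_ANYEXT.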

MachineInstrBuilder MachineIRBuilder::buildBoolExt(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   bool IsFP) {
  unsigned ExtOp = getBoolExtOp(getMRI()->getType(Op.getReg()).isVector(), IsFP);
  return buildInstr(ExtOp, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildExtOrTrunc(unsigned ExtOpc,
                                                      const DstOp &Res,
                                                      const SrcOp &Op) {
  assert((TargetOpcode::G_ANYEXT == ExtOpc || TargetOpcode::G_ZEXT == ExtOpc ||
          TargetOpcode::G_SEXT == ExtOpc) &&
         "Expecting Extending Opc");
  assert(Res.getLLTTy(*getMRI()).isScalar() ||
         Res.getLLTTy(*getMRI()).isVector());
  assert(Res.getLLTTy(*getMRI()).isScalar() ==
         Op.getLLTTy(*getMRI()).isScalar());

  unsigned Opcode = TargetOpcode::COPY;
  if (Res.getLLTTy(*getMRI()).getSizeInBits() >
      Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = ExtOpc;
  else if (Res.getLLTTy(*getMRI()).getSizeInBits() <
           Op.getLLTTy(*getMRI()).getSizeInBits())
    Opcode = TargetOpcode::G_TRUNC;
  else
    assert(Res.getLLTTy(*getMRI()) == Op.getLLTTy(*getMRI()));

  return buildInstr(Opcode, Res, Op);
}
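
// Example (illustrative sketch; S16 and S64 are assumed s16/s64 values from
// caller context): buildZExtOrTrunc picks the opcode from the size relation,
// so the same call works whether the destination is wider, narrower, or the
// same size as the source.
//
//   auto A = MIRBuilder.buildZExtOrTrunc(LLT::scalar(32), S16); // G_ZEXT
//   auto B = MIRBuilder.buildZExtOrTrunc(LLT::scalar(32), S64); // G_TRUNC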

MachineInstrBuilder MachineIRBuilder::buildSExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_SEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildZExtOrTrunc(const DstOp &Res,
                                                       const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ZEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildAnyExtOrTrunc(const DstOp &Res,
                                                         const SrcOp &Op) {
  return buildExtOrTrunc(TargetOpcode::G_ANYEXT, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildCast(const DstOp &Dst,
                                                const SrcOp &Src) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());
  if (SrcTy == DstTy)
    return buildCopy(Dst, Src);

  unsigned Opcode;
  if (SrcTy.isPointer() && DstTy.isScalar())
    Opcode = TargetOpcode::G_PTRTOINT;
  else if (DstTy.isPointer() && SrcTy.isScalar())
    Opcode = TargetOpcode::G_INTTOPTR;
  else {
    assert(!SrcTy.isPointer() && !DstTy.isPointer() && "no G_ADDRCAST yet");
    Opcode = TargetOpcode::G_BITCAST;
  }

  return buildInstr(Opcode, Dst, Src);
}

MachineInstrBuilder MachineIRBuilder::buildExtract(const DstOp &Dst,
                                                   const SrcOp &Src,
                                                   uint64_t Index) {
  LLT SrcTy = Src.getLLTTy(*getMRI());
  LLT DstTy = Dst.getLLTTy(*getMRI());

#ifndef NDEBUG
  assert(SrcTy.isValid() && "invalid operand type");
  assert(DstTy.isValid() && "invalid operand type");
  assert(Index + DstTy.getSizeInBits() <= SrcTy.getSizeInBits() &&
         "extracting off end of register");
#endif

  if (DstTy.getSizeInBits() == SrcTy.getSizeInBits()) {
    assert(Index == 0 && "extraction past the end of a register");
    return buildCast(Dst, Src);
  }

  auto Extract = buildInstr(TargetOpcode::G_EXTRACT);
  Dst.addDefToMIB(*getMRI(), Extract);
  Src.addSrcToMIB(Extract);
  Extract.addImm(Index);
  return Extract;
}

void MachineIRBuilder::buildSequence(Register Res, ArrayRef<Register> Ops,
                                     ArrayRef<uint64_t> Indices) {
#ifndef NDEBUG
  assert(Ops.size() == Indices.size() && "incompatible args");
  assert(!Ops.empty() && "invalid trivial sequence");
  assert(std::is_sorted(Indices.begin(), Indices.end()) &&
         "sequence offsets must be in ascending order");

  assert(getMRI()->getType(Res).isValid() && "invalid operand type");
  for (auto Op : Ops)
    assert(getMRI()->getType(Op).isValid() && "invalid operand type");
#endif

  LLT ResTy = getMRI()->getType(Res);
  LLT OpTy = getMRI()->getType(Ops[0]);
  unsigned OpSize = OpTy.getSizeInBits();
  bool MaybeMerge = true;
  for (unsigned i = 0; i < Ops.size(); ++i) {
    if (getMRI()->getType(Ops[i]) != OpTy || Indices[i] != i * OpSize) {
      MaybeMerge = false;
      break;
    }
  }

  if (MaybeMerge && Ops.size() * OpSize == ResTy.getSizeInBits()) {
    buildMerge(Res, Ops);
    return;
  }

  Register ResIn = getMRI()->createGenericVirtualRegister(ResTy);
  buildUndef(ResIn);

  for (unsigned i = 0; i < Ops.size(); ++i) {
    Register ResOut = i + 1 == Ops.size()
                          ? Res
                          : getMRI()->createGenericVirtualRegister(ResTy);
    buildInsert(ResOut, ResIn, Ops[i], Indices[i]);
    ResIn = ResOut;
  }
}
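
// Sketch of the fast path above (Lo, Hi, and S64Res assumed from caller
// context): two s32 pieces at offsets 0 and 32 that exactly cover an s64
// result collapse to a single G_MERGE_VALUES instead of an undef + G_INSERT
// chain.
//
//   Register Parts[] = {Lo, Hi};
//   uint64_t Offsets[] = {0, 32};
//   MIRBuilder.buildSequence(S64Res, Parts, Offsets); // emits G_MERGE_VALUES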

MachineInstrBuilder MachineIRBuilder::buildUndef(const DstOp &Res) {
  return buildInstr(TargetOpcode::G_IMPLICIT_DEF, {Res}, {});
}

MachineInstrBuilder MachineIRBuilder::buildMerge(const DstOp &Res,
                                                 ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_MERGE_VALUES, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<LLT> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<LLT> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildUnmerge(LLT Res,
                                                   const SrcOp &Op) {
  unsigned NumReg = Op.getLLTTy(*getMRI()).getSizeInBits() / Res.getSizeInBits();
  SmallVector<Register, 8> TmpVec;
  for (unsigned I = 0; I != NumReg; ++I)
    TmpVec.push_back(getMRI()->createGenericVirtualRegister(Res));
  return buildUnmerge(TmpVec, Op);
}
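
// Example (illustrative sketch; S64Val assumed from caller context): the
// LLT-taking overload above creates the destination registers itself, so
// splitting an s64 into two s32 halves is a single call.
//
//   auto Unmerge = MIRBuilder.buildUnmerge(LLT::scalar(32), S64Val);
//   Register Lo = Unmerge.getReg(0);
//   Register Hi = Unmerge.getReg(1);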

MachineInstrBuilder MachineIRBuilder::buildUnmerge(ArrayRef<Register> Res,
                                                   const SrcOp &Op) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<DstOp>,
  // we need some temporary storage for the DstOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<DstOp, 8> TmpVec(Res.begin(), Res.end());
  assert(TmpVec.size() > 1);
  return buildInstr(TargetOpcode::G_UNMERGE_VALUES, TmpVec, Op);
}

MachineInstrBuilder MachineIRBuilder::buildBuildVector(const DstOp &Res,
                                                       ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildSplatVector(const DstOp &Res,
                                                       const SrcOp &Src) {
  SmallVector<SrcOp, 8> TmpVec(Res.getLLTTy(*getMRI()).getNumElements(), Src);
  return buildInstr(TargetOpcode::G_BUILD_VECTOR, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildBuildVectorTrunc(const DstOp &Res,
                                        ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_BUILD_VECTOR_TRUNC, Res, TmpVec);
}

MachineInstrBuilder
MachineIRBuilder::buildConcatVectors(const DstOp &Res, ArrayRef<Register> Ops) {
  // Unfortunately to convert from ArrayRef<Register> to ArrayRef<SrcOp>,
  // we need some temporary storage for the SrcOp objects. Here we use a
  // sufficiently large SmallVector to not go through the heap.
  SmallVector<SrcOp, 8> TmpVec(Ops.begin(), Ops.end());
  return buildInstr(TargetOpcode::G_CONCAT_VECTORS, Res, TmpVec);
}

MachineInstrBuilder MachineIRBuilder::buildInsert(Register Res, Register Src,
                                                  Register Op, unsigned Index) {
  assert(Index + getMRI()->getType(Op).getSizeInBits() <=
             getMRI()->getType(Res).getSizeInBits() &&
         "insertion past the end of a register");

  if (getMRI()->getType(Res).getSizeInBits() ==
      getMRI()->getType(Op).getSizeInBits()) {
    return buildCast(Res, Op);
  }

  return buildInstr(TargetOpcode::G_INSERT)
      .addDef(Res)
      .addUse(Src)
      .addUse(Op)
      .addImm(Index);
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<Register> ResultRegs,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (unsigned ResultReg : ResultRegs)
    MIB.addDef(ResultReg);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildIntrinsic(Intrinsic::ID ID,
                                                     ArrayRef<DstOp> Results,
                                                     bool HasSideEffects) {
  auto MIB =
      buildInstr(HasSideEffects ? TargetOpcode::G_INTRINSIC_W_SIDE_EFFECTS
                                : TargetOpcode::G_INTRINSIC);
  for (DstOp Result : Results)
    Result.addDefToMIB(*getMRI(), MIB);
  MIB.addIntrinsicID(ID);
  return MIB;
}

MachineInstrBuilder MachineIRBuilder::buildTrunc(const DstOp &Res,
                                                 const SrcOp &Op) {
  return buildInstr(TargetOpcode::G_TRUNC, Res, Op);
}

MachineInstrBuilder MachineIRBuilder::buildFPTrunc(const DstOp &Res,
                                                   const SrcOp &Op,
                                                   Optional<unsigned> Flags) {
  return buildInstr(TargetOpcode::G_FPTRUNC, Res, Op, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildICmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1) {
  return buildInstr(TargetOpcode::G_ICMP, Res, {Pred, Op0, Op1});
}

MachineInstrBuilder MachineIRBuilder::buildFCmp(CmpInst::Predicate Pred,
                                                const DstOp &Res,
                                                const SrcOp &Op0,
                                                const SrcOp &Op1,
                                                Optional<unsigned> Flags) {

  return buildInstr(TargetOpcode::G_FCMP, Res, {Pred, Op0, Op1}, Flags);
}

MachineInstrBuilder MachineIRBuilder::buildSelect(const DstOp &Res,
                                                  const SrcOp &Tst,
                                                  const SrcOp &Op0,
                                                  const SrcOp &Op1,
                                                  Optional<unsigned> Flags) {

  return buildInstr(TargetOpcode::G_SELECT, {Res}, {Tst, Op0, Op1}, Flags);
}

MachineInstrBuilder
MachineIRBuilder::buildInsertVectorElement(const DstOp &Res, const SrcOp &Val,
                                           const SrcOp &Elt, const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_INSERT_VECTOR_ELT, Res, {Val, Elt, Idx});
}

MachineInstrBuilder
MachineIRBuilder::buildExtractVectorElement(const DstOp &Res, const SrcOp &Val,
                                            const SrcOp &Idx) {
  return buildInstr(TargetOpcode::G_EXTRACT_VECTOR_ELT, Res, {Val, Idx});
}

MachineInstrBuilder MachineIRBuilder::buildAtomicCmpXchgWithSuccess(
    Register OldValRes, Register SuccessRes, Register Addr, Register CmpVal,
    Register NewVal, MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT SuccessResTy = getMRI()->getType(SuccessRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(SuccessResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG_WITH_SUCCESS)
      .addDef(OldValRes)
      .addDef(SuccessRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicCmpXchg(Register OldValRes, Register Addr,
                                     Register CmpVal, Register NewVal,
                                     MachineMemOperand &MMO) {
#ifndef NDEBUG
  LLT OldValResTy = getMRI()->getType(OldValRes);
  LLT AddrTy = getMRI()->getType(Addr);
  LLT CmpValTy = getMRI()->getType(CmpVal);
  LLT NewValTy = getMRI()->getType(NewVal);
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(CmpValTy.isValid() && "invalid operand type");
  assert(NewValTy.isValid() && "invalid operand type");
  assert(OldValResTy == CmpValTy && "type mismatch");
  assert(OldValResTy == NewValTy && "type mismatch");
#endif

  return buildInstr(TargetOpcode::G_ATOMIC_CMPXCHG)
      .addDef(OldValRes)
      .addUse(Addr)
      .addUse(CmpVal)
      .addUse(NewVal)
      .addMemOperand(&MMO);
}

MachineInstrBuilder MachineIRBuilder::buildAtomicRMW(
  unsigned Opcode, const DstOp &OldValRes,
  const SrcOp &Addr, const SrcOp &Val,
  MachineMemOperand &MMO) {

#ifndef NDEBUG
  LLT OldValResTy = OldValRes.getLLTTy(*getMRI());
  LLT AddrTy = Addr.getLLTTy(*getMRI());
  LLT ValTy = Val.getLLTTy(*getMRI());
  assert(OldValResTy.isScalar() && "invalid operand type");
  assert(AddrTy.isPointer() && "invalid operand type");
  assert(ValTy.isValid() && "invalid operand type");
  assert(OldValResTy == ValTy && "type mismatch");
  assert(MMO.isAtomic() && "not atomic mem operand");
#endif

  auto MIB = buildInstr(Opcode);
  OldValRes.addDefToMIB(*getMRI(), MIB);
  Addr.addSrcToMIB(MIB);
  Val.addSrcToMIB(MIB);
  MIB.addMemOperand(&MMO);
  return MIB;
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXchg(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XCHG, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAdd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_ADD, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWSub(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_SUB, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWAnd(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_AND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWNand(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_NAND, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder MachineIRBuilder::buildAtomicRMWOr(Register OldValRes,
                                                       Register Addr,
                                                       Register Val,
                                                       MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_OR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWXor(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_XOR, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMax(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWMin(Register OldValRes, Register Addr,
                                    Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_MIN, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmax(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMAX, OldValRes, Addr, Val,
                        MMO);
}
MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWUmin(Register OldValRes, Register Addr,
                                     Register Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_UMIN, OldValRes, Addr, Val,
                        MMO);
}
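
// Example (illustrative sketch; OldVal, Addr, Val, and an atomic
// MachineMemOperand *MMO are assumed from caller context): every wrapper
// above funnels into buildAtomicRMW, which asserts that the memory operand
// really is atomic.
//
//   auto RMW = MIRBuilder.buildAtomicRMWAdd(OldVal, Addr, Val, *MMO);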

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFAdd(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FADD, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildAtomicRMWFSub(const DstOp &OldValRes, const SrcOp &Addr,
                                     const SrcOp &Val, MachineMemOperand &MMO) {
  return buildAtomicRMW(TargetOpcode::G_ATOMICRMW_FSUB, OldValRes, Addr, Val,
                        MMO);
}

MachineInstrBuilder
MachineIRBuilder::buildFence(unsigned Ordering, unsigned Scope) {
  return buildInstr(TargetOpcode::G_FENCE)
    .addImm(Ordering)
    .addImm(Scope);
}

MachineInstrBuilder
MachineIRBuilder::buildBlockAddress(Register Res, const BlockAddress *BA) {
#ifndef NDEBUG
  assert(getMRI()->getType(Res).isPointer() && "invalid res type");
#endif

  return buildInstr(TargetOpcode::G_BLOCK_ADDR).addDef(Res).addBlockAddress(BA);
}

void MachineIRBuilder::validateTruncExt(const LLT &DstTy, const LLT &SrcTy,
                                        bool IsExtend) {
#ifndef NDEBUG
  if (DstTy.isVector()) {
    assert(SrcTy.isVector() && "mismatched cast between vector and non-vector");
    assert(SrcTy.getNumElements() == DstTy.getNumElements() &&
           "different number of elements in a trunc/ext");
  } else
    assert(DstTy.isScalar() && SrcTy.isScalar() && "invalid extend/trunc");

  if (IsExtend)
    assert(DstTy.getSizeInBits() > SrcTy.getSizeInBits() &&
           "invalid narrowing extend");
  else
    assert(DstTy.getSizeInBits() < SrcTy.getSizeInBits() &&
           "invalid widening trunc");
#endif
}

void MachineIRBuilder::validateSelectOp(const LLT &ResTy, const LLT &TstTy,
                                        const LLT &Op0Ty, const LLT &Op1Ty) {
#ifndef NDEBUG
  assert((ResTy.isScalar() || ResTy.isVector() || ResTy.isPointer()) &&
         "invalid operand type");
  assert((ResTy == Op0Ty && ResTy == Op1Ty) && "type mismatch");
  if (ResTy.isScalar() || ResTy.isPointer())
    assert(TstTy.isScalar() && "type mismatch");
  else
    assert((TstTy.isScalar() ||
            (TstTy.isVector() &&
             TstTy.getNumElements() == Op0Ty.getNumElements())) &&
           "type mismatch");
#endif
}

MachineInstrBuilder MachineIRBuilder::buildInstr(unsigned Opc,
                                                 ArrayRef<DstOp> DstOps,
                                                 ArrayRef<SrcOp> SrcOps,
                                                 Optional<unsigned> Flags) {
  switch (Opc) {
  default:
    break;
  case TargetOpcode::G_SELECT: {
    assert(DstOps.size() == 1 && "Invalid select");
    assert(SrcOps.size() == 3 && "Invalid select");
    validateSelectOp(
        DstOps[0].getLLTTy(*getMRI()), SrcOps[0].getLLTTy(*getMRI()),
        SrcOps[1].getLLTTy(*getMRI()), SrcOps[2].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_ADD:
  case TargetOpcode::G_AND:
  case TargetOpcode::G_MUL:
  case TargetOpcode::G_OR:
  case TargetOpcode::G_SUB:
  case TargetOpcode::G_XOR:
  case TargetOpcode::G_UDIV:
  case TargetOpcode::G_SDIV:
  case TargetOpcode::G_UREM:
  case TargetOpcode::G_SREM:
  case TargetOpcode::G_SMIN:
  case TargetOpcode::G_SMAX:
  case TargetOpcode::G_UMIN:
  case TargetOpcode::G_UMAX: {
    // All these are binary ops.
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateBinaryOp(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()),
                     SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SHL:
  case TargetOpcode::G_ASHR:
  case TargetOpcode::G_LSHR: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 2 && "Invalid Srcs");
    validateShiftOp(DstOps[0].getLLTTy(*getMRI()),
                    SrcOps[0].getLLTTy(*getMRI()),
                    SrcOps[1].getLLTTy(*getMRI()));
    break;
  }
  case TargetOpcode::G_SEXT:
  case TargetOpcode::G_ZEXT:
  case TargetOpcode::G_ANYEXT:
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), true);
    break;
  case TargetOpcode::G_TRUNC:
  case TargetOpcode::G_FPTRUNC: {
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(SrcOps.size() == 1 && "Invalid Srcs");
    validateTruncExt(DstOps[0].getLLTTy(*getMRI()),
                     SrcOps[0].getLLTTy(*getMRI()), false);
    break;
  }
  case TargetOpcode::COPY:
    assert(DstOps.size() == 1 && "Invalid Dst");
    // If the caller wants to add a subreg source it has to be done separately
    // so we may not have any SrcOps at this point yet.
    break;
  case TargetOpcode::G_FCMP:
  case TargetOpcode::G_ICMP: {
    assert(DstOps.size() == 1 && "Invalid Dst Operands");
    assert(SrcOps.size() == 3 && "Invalid Src Operands");
    // For F/ICMP, the first src operand is the predicate, followed by
    // the two comparands.
    assert(SrcOps[0].getSrcOpKind() == SrcOp::SrcType::Ty_Predicate &&
           "Expecting predicate");
    assert([&]() -> bool {
      CmpInst::Predicate Pred = SrcOps[0].getPredicate();
      return Opc == TargetOpcode::G_ICMP ? CmpInst::isIntPredicate(Pred)
                                         : CmpInst::isFPPredicate(Pred);
    }() && "Invalid predicate");
    assert(SrcOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert([&]() -> bool {
      LLT Op0Ty = SrcOps[1].getLLTTy(*getMRI());
      LLT DstTy = DstOps[0].getLLTTy(*getMRI());
      if (Op0Ty.isScalar() || Op0Ty.isPointer())
        return DstTy.isScalar();
      else
        return DstTy.isVector() &&
               DstTy.getNumElements() == Op0Ty.getNumElements();
    }() && "Type Mismatch");
    break;
  }
  case TargetOpcode::G_UNMERGE_VALUES: {
    assert(!DstOps.empty() && "Invalid trivial sequence");
    assert(SrcOps.size() == 1 && "Invalid src for Unmerge");
    assert(std::all_of(DstOps.begin(), DstOps.end(),
                       [&, this](const DstOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                DstOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in output list");
    assert(DstOps.size() * DstOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    break;
  }
  case TargetOpcode::G_MERGE_VALUES: {
    assert(!SrcOps.empty() && "invalid trivial sequence");
    assert(DstOps.size() == 1 && "Invalid Dst");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input operands do not cover output register");
    if (SrcOps.size() == 1)
      return buildCast(DstOps[0], SrcOps[0]);
    if (DstOps[0].getLLTTy(*getMRI()).isVector()) {
      if (SrcOps[0].getLLTTy(*getMRI()).isVector())
        return buildInstr(TargetOpcode::G_CONCAT_VECTORS, DstOps, SrcOps);
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    }
    break;
  }
  case TargetOpcode::G_EXTRACT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid Dst size");
    assert(SrcOps.size() == 2 && "Invalid Src size");
    assert(SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert((DstOps[0].getLLTTy(*getMRI()).isScalar() ||
            DstOps[0].getLLTTy(*getMRI()).isPointer()) &&
           "Invalid operand type");
    assert(SrcOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand type");
    assert(SrcOps[0].getLLTTy(*getMRI()).getElementType() ==
               DstOps[0].getLLTTy(*getMRI()) &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_INSERT_VECTOR_ELT: {
    assert(DstOps.size() == 1 && "Invalid dst size");
    assert(SrcOps.size() == 3 && "Invalid src size");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           SrcOps[0].getLLTTy(*getMRI()).isVector() && "Invalid operand type");
    assert(DstOps[0].getLLTTy(*getMRI()).getElementType() ==
               SrcOps[1].getLLTTy(*getMRI()) &&
           "Type mismatch");
    assert(SrcOps[2].getLLTTy(*getMRI()).isScalar() && "Invalid index");
    assert(DstOps[0].getLLTTy(*getMRI()).getNumElements() ==
               SrcOps[0].getLLTTy(*getMRI()).getNumElements() &&
           "Type mismatch");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input scalars do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_BUILD_VECTOR_TRUNC: {
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(DstOps[0].getLLTTy(*getMRI()).isVector() &&
           "Res type must be a vector");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return Op.getLLTTy(*getMRI()) ==
                                SrcOps[0].getLLTTy(*getMRI());
                       }) &&
           "type mismatch in input list");
    if (SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
        DstOps[0].getLLTTy(*getMRI()).getElementType().getSizeInBits())
      return buildInstr(TargetOpcode::G_BUILD_VECTOR, DstOps, SrcOps);
    break;
  }
  case TargetOpcode::G_CONCAT_VECTORS: {
    assert(DstOps.size() == 1 && "Invalid DstOps");
    assert(SrcOps.size() >= 2 && "Must have at least 2 operands");
    assert(std::all_of(SrcOps.begin(), SrcOps.end(),
                       [&, this](const SrcOp &Op) {
                         return (Op.getLLTTy(*getMRI()).isVector() &&
                                 Op.getLLTTy(*getMRI()) ==
                                     SrcOps[0].getLLTTy(*getMRI()));
                       }) &&
           "type mismatch in input list");
    assert(SrcOps.size() * SrcOps[0].getLLTTy(*getMRI()).getSizeInBits() ==
               DstOps[0].getLLTTy(*getMRI()).getSizeInBits() &&
           "input vectors do not exactly cover the output vector register");
    break;
  }
  case TargetOpcode::G_UADDE: {
    assert(DstOps.size() == 2 && "Invalid no of dst operands");
    assert(SrcOps.size() == 3 && "Invalid no of src operands");
    assert(DstOps[0].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert((DstOps[0].getLLTTy(*getMRI()) == SrcOps[0].getLLTTy(*getMRI())) &&
           (DstOps[0].getLLTTy(*getMRI()) == SrcOps[1].getLLTTy(*getMRI())) &&
           "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()).isScalar() && "Invalid operand");
    assert(DstOps[1].getLLTTy(*getMRI()) == SrcOps[2].getLLTTy(*getMRI()) &&
           "type mismatch");
    break;
  }
  }

  auto MIB = buildInstr(Opc);
  for (const DstOp &Op : DstOps)
    Op.addDefToMIB(*getMRI(), MIB);
  for (const SrcOp &Op : SrcOps)
    Op.addSrcToMIB(MIB);
  if (Flags)
    MIB->setFlags(*Flags);
  return MIB;
}
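
// Example (illustrative sketch; LHS and RHS are assumed s32 values from
// caller context): most typed helpers reduce to this generic entry point,
// which validates the operands for the opcode and then appends the defs,
// uses, and optional MI flags.
//
//   auto Add = MIRBuilder.buildInstr(TargetOpcode::G_ADD, {LLT::scalar(32)},
//                                    {LHS, RHS});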