CGVTables.cpp revision 259278
//===--- CGVTables.cpp - Emit LLVM Code for C++ vtables -------------------===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of virtual tables.
//
//===----------------------------------------------------------------------===//

#include "CodeGenFunction.h"
#include "CGCXXABI.h"
#include "CodeGenModule.h"
#include "clang/AST/CXXInheritance.h"
#include "clang/AST/RecordLayout.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "llvm/ADT/DenseSet.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/Format.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include <algorithm>
#include <cstdio>

using namespace clang;
using namespace CodeGen;

CodeGenVTables::CodeGenVTables(CodeGenModule &CGM)
  : CGM(CGM), VTContext(CGM.getContext()) { }

llvm::Constant *CodeGenModule::GetAddrOfThunk(GlobalDecl GD,
                                              const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());

  // Compute the mangled name.
  SmallString<256> Name;
  llvm::raw_svector_ostream Out(Name);
  if (const CXXDestructorDecl* DD = dyn_cast<CXXDestructorDecl>(MD))
    getCXXABI().getMangleContext().mangleCXXDtorThunk(DD, GD.getDtorType(),
                                                      Thunk.This, Out);
  else
    getCXXABI().getMangleContext().mangleThunk(MD, Thunk, Out);
  Out.flush();

  llvm::Type *Ty = getTypes().GetFunctionTypeForVTable(GD);
  return GetOrCreateLLVMFunction(Name, Ty, GD, /*ForVTable=*/true);
}

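// Thunk adjustments combine a constant non-virtual byte offset with, when a
// virtual base is involved, an offset that has to be loaded from the vtable.
// A hypothetical example of the purely non-virtual case:
//
//   struct A { virtual void f(); };
//   struct B { virtual void g(); };
//   struct C : A, B { virtual void g(); };
//
// The slot for C::g in the B-in-C vtable is entered with a B*, so the thunk
// must slide 'this' from the B subobject back to the enclosing C object
// before calling the real definition; covariant return thunks perform the
// analogous derived-to-base adjustment on the value being returned.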
static llvm::Value *PerformTypeAdjustment(CodeGenFunction &CGF,
                                          llvm::Value *Ptr,
                                          int64_t NonVirtualAdjustment,
                                          int64_t VirtualAdjustment,
                                          bool IsReturnAdjustment) {
  if (!NonVirtualAdjustment && !VirtualAdjustment)
    return Ptr;

  llvm::Type *Int8PtrTy = CGF.Int8PtrTy;
  llvm::Value *V = CGF.Builder.CreateBitCast(Ptr, Int8PtrTy);

  if (NonVirtualAdjustment && !IsReturnAdjustment) {
    // Perform the non-virtual adjustment for a base-to-derived cast.
    V = CGF.Builder.CreateConstInBoundsGEP1_64(V, NonVirtualAdjustment);
  }

  if (VirtualAdjustment) {
    llvm::Type *PtrDiffTy =
      CGF.ConvertType(CGF.getContext().getPointerDiffType());

    // Perform the virtual adjustment.
    llvm::Value *VTablePtrPtr =
      CGF.Builder.CreateBitCast(V, Int8PtrTy->getPointerTo());

    llvm::Value *VTablePtr = CGF.Builder.CreateLoad(VTablePtrPtr);

    llvm::Value *OffsetPtr =
      CGF.Builder.CreateConstInBoundsGEP1_64(VTablePtr, VirtualAdjustment);

    OffsetPtr = CGF.Builder.CreateBitCast(OffsetPtr, PtrDiffTy->getPointerTo());

    // Load the adjustment offset from the vtable.
    llvm::Value *Offset = CGF.Builder.CreateLoad(OffsetPtr);

    // Adjust our pointer.
    V = CGF.Builder.CreateInBoundsGEP(V, Offset);
  }

  if (NonVirtualAdjustment && IsReturnAdjustment) {
    // Perform the non-virtual adjustment for a derived-to-base cast.
    V = CGF.Builder.CreateConstInBoundsGEP1_64(V, NonVirtualAdjustment);
  }

  // Cast back to the original type.
  return CGF.Builder.CreateBitCast(V, Ptr->getType());
}

static void setThunkVisibility(CodeGenModule &CGM, const CXXMethodDecl *MD,
                               const ThunkInfo &Thunk, llvm::Function *Fn) {
  CGM.setGlobalVisibility(Fn, MD);

  if (!CGM.getCodeGenOpts().HiddenWeakVTables)
    return;

  // If the thunk has weak/linkonce linkage, but the function must be
  // emitted in every translation unit that references it, then we can
  // emit its thunks with hidden visibility, since its thunks must be
  // emitted when the function is.

  // This follows CodeGenModule::setTypeVisibility; see the comments
  // there for explanation.

  if ((Fn->getLinkage() != llvm::GlobalVariable::LinkOnceODRLinkage &&
       Fn->getLinkage() != llvm::GlobalVariable::WeakODRLinkage) ||
      Fn->getVisibility() != llvm::GlobalVariable::DefaultVisibility)
    return;

  if (MD->getExplicitVisibility(ValueDecl::VisibilityForValue))
    return;

  switch (MD->getTemplateSpecializationKind()) {
  case TSK_ExplicitInstantiationDefinition:
  case TSK_ExplicitInstantiationDeclaration:
    return;

  case TSK_Undeclared:
    break;

  case TSK_ExplicitSpecialization:
  case TSK_ImplicitInstantiation:
    return;
  }

  // If there's an explicit definition, and that definition is
  // out-of-line, then we can't assume that all users will have a
  // definition to emit.
  const FunctionDecl *Def = 0;
  if (MD->hasBody(Def) && Def->isOutOfLine())
    return;

  Fn->setVisibility(llvm::GlobalValue::HiddenVisibility);
}

#ifndef NDEBUG
static bool similar(const ABIArgInfo &infoL, CanQualType typeL,
                    const ABIArgInfo &infoR, CanQualType typeR) {
  return (infoL.getKind() == infoR.getKind() &&
          (typeL == typeR ||
           (isa<PointerType>(typeL) && isa<PointerType>(typeR)) ||
           (isa<ReferenceType>(typeL) && isa<ReferenceType>(typeR))));
}
#endif

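// Covariant return thunks convert the pointer (or reference) returned by the
// overriding function into a pointer to the base class that the overridden
// declaration promises. A returned pointer may be null, in which case no
// adjustment may be applied, so the code below branches around the adjustment
// and merges a null result back in with a phi; references can never be null
// and skip the check.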
static RValue PerformReturnAdjustment(CodeGenFunction &CGF,
                                      QualType ResultType, RValue RV,
                                      const ThunkInfo &Thunk) {
  // Emit the return adjustment.
  bool NullCheckValue = !ResultType->isReferenceType();

  llvm::BasicBlock *AdjustNull = 0;
  llvm::BasicBlock *AdjustNotNull = 0;
  llvm::BasicBlock *AdjustEnd = 0;

  llvm::Value *ReturnValue = RV.getScalarVal();

  if (NullCheckValue) {
    AdjustNull = CGF.createBasicBlock("adjust.null");
    AdjustNotNull = CGF.createBasicBlock("adjust.notnull");
    AdjustEnd = CGF.createBasicBlock("adjust.end");

    llvm::Value *IsNull = CGF.Builder.CreateIsNull(ReturnValue);
    CGF.Builder.CreateCondBr(IsNull, AdjustNull, AdjustNotNull);
    CGF.EmitBlock(AdjustNotNull);
  }

  ReturnValue = PerformTypeAdjustment(CGF, ReturnValue,
                                      Thunk.Return.NonVirtual,
                                      Thunk.Return.VBaseOffsetOffset,
                                      /*IsReturnAdjustment*/true);

  if (NullCheckValue) {
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustNull);
    CGF.Builder.CreateBr(AdjustEnd);
    CGF.EmitBlock(AdjustEnd);

    llvm::PHINode *PHI = CGF.Builder.CreatePHI(ReturnValue->getType(), 2);
    PHI->addIncoming(ReturnValue, AdjustNotNull);
    PHI->addIncoming(llvm::Constant::getNullValue(ReturnValue->getType()),
                     AdjustNull);
    ReturnValue = PHI;
  }

  return RValue::get(ReturnValue);
}

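// A variadic virtual function can still require a 'this' adjustment; for a
// hypothetical
//
//   struct A { virtual void f(const char *fmt, ...); };
//   struct B { virtual void g(); };
//   struct C : B, A { virtual void f(const char *fmt, ...); };
//
// the entry for C::f in the A-in-C vtable needs an adjusting thunk, but an
// ordinary forwarding thunk cannot re-pass arguments it received through
// '...'. Hence the cloning approach below.
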
// This function does roughly the same thing as GenerateThunk, but in a
// very different way, so that va_start and va_end work correctly.
// FIXME: This function assumes "this" is the first non-sret LLVM argument of
//        a function, and that there is an alloca built in the entry block
//        for all accesses to "this".
// FIXME: This function assumes there is only one "ret" statement per function.
// FIXME: Cloning isn't correct in the presence of indirect goto!
// FIXME: This implementation of thunks bloats codesize by duplicating the
//        function definition.  There are alternatives:
//        1. Add some sort of stub support to LLVM for cases where we can
//           do a this adjustment, then a sibcall.
//        2. We could transform the definition to take a va_list instead of an
//           actual variable argument list, then have the thunks (including a
//           no-op thunk for the regular definition) call va_start/va_end.
//           There's a bit of per-call overhead for this solution, but it's
//           better for codesize if the definition is long.
void CodeGenFunction::GenerateVarArgsThunk(
                                      llvm::Function *Fn,
                                      const CGFunctionInfo &FnInfo,
                                      GlobalDecl GD, const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  QualType ResultType = FPT->getResultType();

  // Get the original function
  assert(FnInfo.isVariadic());
  llvm::Type *Ty = CGM.getTypes().GetFunctionType(FnInfo);
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
  llvm::Function *BaseFn = cast<llvm::Function>(Callee);

  // Clone to thunk.
  llvm::ValueToValueMapTy VMap;
  llvm::Function *NewFn = llvm::CloneFunction(BaseFn, VMap,
                                              /*ModuleLevelChanges=*/false);
  CGM.getModule().getFunctionList().push_back(NewFn);
  Fn->replaceAllUsesWith(NewFn);
  NewFn->takeName(Fn);
  Fn->eraseFromParent();
  Fn = NewFn;

  // "Initialize" CGF (minimally).
  CurFn = Fn;

  // Get the "this" value
  llvm::Function::arg_iterator AI = Fn->arg_begin();
  if (CGM.ReturnTypeUsesSRet(FnInfo))
    ++AI;

  // Find the first store of "this", which will be to the alloca associated
  // with "this".
  llvm::Value *ThisPtr = &*AI;
  llvm::BasicBlock *EntryBB = Fn->begin();
  llvm::Instruction *ThisStore = 0;
  for (llvm::BasicBlock::iterator I = EntryBB->begin(), E = EntryBB->end();
       I != E; I++) {
    if (isa<llvm::StoreInst>(I) && I->getOperand(0) == ThisPtr) {
      ThisStore = cast<llvm::StoreInst>(I);
      break;
    }
  }
  assert(ThisStore && "Store of this should be in entry block?");
  // Adjust "this", if necessary.
  Builder.SetInsertPoint(ThisStore);
  llvm::Value *AdjustedThisPtr =
    PerformTypeAdjustment(*this, ThisPtr,
                          Thunk.This.NonVirtual,
                          Thunk.This.VCallOffsetOffset,
                          /*IsReturnAdjustment*/false);
  ThisStore->setOperand(0, AdjustedThisPtr);

  if (!Thunk.Return.isEmpty()) {
    // Fix up the returned value, if necessary.
    for (llvm::Function::iterator I = Fn->begin(), E = Fn->end(); I != E; I++) {
      llvm::Instruction *T = I->getTerminator();
      if (isa<llvm::ReturnInst>(T)) {
        RValue RV = RValue::get(T->getOperand(0));
        T->eraseFromParent();
        Builder.SetInsertPoint(&*I);
        RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);
        Builder.CreateRet(RV.getScalarVal());
        break;
      }
    }
  }
}

void CodeGenFunction::GenerateThunk(llvm::Function *Fn,
                                    const CGFunctionInfo &FnInfo,
                                    GlobalDecl GD, const ThunkInfo &Thunk) {
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  const FunctionProtoType *FPT = MD->getType()->getAs<FunctionProtoType>();
  QualType ResultType = FPT->getResultType();
  QualType ThisType = MD->getThisType(getContext());

  FunctionArgList FunctionArgs;

  // FIXME: It would be nice if more of this code could be shared with
  // CodeGenFunction::GenerateCode.

  // Create the implicit 'this' parameter declaration.
  CurGD = GD;
  CGM.getCXXABI().BuildInstanceFunctionParams(*this, ResultType, FunctionArgs);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *Param = *I;

    FunctionArgs.push_back(Param);
  }

  // Initialize debug info if needed.
  maybeInitializeDebugInfo();

  StartFunction(GlobalDecl(), ResultType, Fn, FnInfo, FunctionArgs,
                SourceLocation());

  CGM.getCXXABI().EmitInstanceFunctionProlog(*this);
  CXXThisValue = CXXABIThisValue;

  // Adjust the 'this' pointer if necessary.
  llvm::Value *AdjustedThisPtr =
    PerformTypeAdjustment(*this, LoadCXXThis(),
                          Thunk.This.NonVirtual,
                          Thunk.This.VCallOffsetOffset,
                          /*IsReturnAdjustment*/false);

  CallArgList CallArgs;

  // Add our adjusted 'this' pointer.
  CallArgs.add(RValue::get(AdjustedThisPtr), ThisType);

  // Add the rest of the parameters.
  for (FunctionDecl::param_const_iterator I = MD->param_begin(),
       E = MD->param_end(); I != E; ++I) {
    ParmVarDecl *param = *I;
    EmitDelegateCallArg(CallArgs, param);
  }

  // Get our callee.
  llvm::Type *Ty =
    CGM.getTypes().GetFunctionType(CGM.getTypes().arrangeGlobalDeclaration(GD));
  llvm::Value *Callee = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);

#ifndef NDEBUG
  const CGFunctionInfo &CallFnInfo =
    CGM.getTypes().arrangeCXXMethodCall(CallArgs, FPT,
                                       RequiredArgs::forPrototypePlus(FPT, 1));
  assert(CallFnInfo.getRegParm() == FnInfo.getRegParm() &&
         CallFnInfo.isNoReturn() == FnInfo.isNoReturn() &&
         CallFnInfo.getCallingConvention() == FnInfo.getCallingConvention());
  assert(isa<CXXDestructorDecl>(MD) || // ignore dtor return types
         similar(CallFnInfo.getReturnInfo(), CallFnInfo.getReturnType(),
                 FnInfo.getReturnInfo(), FnInfo.getReturnType()));
  assert(CallFnInfo.arg_size() == FnInfo.arg_size());
  for (unsigned i = 0, e = FnInfo.arg_size(); i != e; ++i)
    assert(similar(CallFnInfo.arg_begin()[i].info,
                   CallFnInfo.arg_begin()[i].type,
                   FnInfo.arg_begin()[i].info, FnInfo.arg_begin()[i].type));
#endif

  // Determine whether we have a return value slot to use.
  ReturnValueSlot Slot;
  if (!ResultType->isVoidType() &&
      FnInfo.getReturnInfo().getKind() == ABIArgInfo::Indirect &&
      !hasScalarEvaluationKind(CurFnInfo->getReturnType()))
    Slot = ReturnValueSlot(ReturnValue, ResultType.isVolatileQualified());

  // Now emit our call.
  RValue RV = EmitCall(FnInfo, Callee, Slot, CallArgs, MD);

  if (!Thunk.Return.isEmpty())
    RV = PerformReturnAdjustment(*this, ResultType, RV, Thunk);

  if (!ResultType->isVoidType() && Slot.isNull())
    CGM.getCXXABI().EmitReturnFromThunk(*this, RV, ResultType);

  // Disable the final ARC autorelease.
  AutoreleaseResult = false;

  FinishFunction();

  // Set the right linkage.
  CGM.setFunctionLinkage(MD, Fn);

  // Set the right visibility.
  setThunkVisibility(CGM, MD, Thunk, Fn);
}

void CodeGenVTables::EmitThunk(GlobalDecl GD, const ThunkInfo &Thunk,
                               bool UseAvailableExternallyLinkage)
{
  const CGFunctionInfo &FnInfo = CGM.getTypes().arrangeGlobalDeclaration(GD);

  // FIXME: re-use FnInfo in this computation.
  llvm::Constant *Entry = CGM.GetAddrOfThunk(GD, Thunk);

  // Strip off a bitcast if we got one back.
  if (llvm::ConstantExpr *CE = dyn_cast<llvm::ConstantExpr>(Entry)) {
    assert(CE->getOpcode() == llvm::Instruction::BitCast);
    Entry = CE->getOperand(0);
  }

  // There's already a declaration with the same name, check if it has the same
  // type or if we need to replace it.
  if (cast<llvm::GlobalValue>(Entry)->getType()->getElementType() !=
      CGM.getTypes().GetFunctionTypeForVTable(GD)) {
    llvm::GlobalValue *OldThunkFn = cast<llvm::GlobalValue>(Entry);

    // If the types mismatch then we have to rewrite the definition.
    assert(OldThunkFn->isDeclaration() &&
           "Shouldn't replace non-declaration");

    // Remove the name from the old thunk function and get a new thunk.
    OldThunkFn->setName(StringRef());
    Entry = CGM.GetAddrOfThunk(GD, Thunk);

    // If needed, replace the old thunk with a bitcast.
    if (!OldThunkFn->use_empty()) {
      llvm::Constant *NewPtrForOldDecl =
        llvm::ConstantExpr::getBitCast(Entry, OldThunkFn->getType());
      OldThunkFn->replaceAllUsesWith(NewPtrForOldDecl);
    }

    // Remove the old thunk.
    OldThunkFn->eraseFromParent();
  }

  llvm::Function *ThunkFn = cast<llvm::Function>(Entry);

  if (!ThunkFn->isDeclaration()) {
    if (UseAvailableExternallyLinkage) {
      // There is already a thunk emitted for this function, do nothing.
      return;
    }

    // If a function has a body, it should have available_externally linkage.
    assert(ThunkFn->hasAvailableExternallyLinkage() &&
           "Function should have available_externally linkage!");

    // Change the linkage.
    CGM.setFunctionLinkage(cast<CXXMethodDecl>(GD.getDecl()), ThunkFn);
    return;
  }

  CGM.SetLLVMFunctionAttributesForDefinition(GD.getDecl(), ThunkFn);

  if (ThunkFn->isVarArg()) {
    // Varargs thunks are special; we can't just generate a call because
    // we can't copy the varargs.  Our implementation is rather
    // expensive/sucky at the moment, so don't generate the thunk unless
    // we have to.
    // FIXME: Do something better here; GenerateVarArgsThunk is extremely ugly.
    if (!UseAvailableExternallyLinkage)
      CodeGenFunction(CGM).GenerateVarArgsThunk(ThunkFn, FnInfo, GD, Thunk);
  } else {
    // Normal thunk body generation.
    CodeGenFunction(CGM).GenerateThunk(ThunkFn, FnInfo, GD, Thunk);
    if (UseAvailableExternallyLinkage)
      ThunkFn->setLinkage(llvm::GlobalValue::AvailableExternallyLinkage);
  }
}

void CodeGenVTables::MaybeEmitThunkAvailableExternally(GlobalDecl GD,
                                                       const ThunkInfo &Thunk) {
  // We only want to do this when building with optimizations.
  if (!CGM.getCodeGenOpts().OptimizationLevel)
    return;

  // We can't emit thunks for member functions with incomplete types.
  const CXXMethodDecl *MD = cast<CXXMethodDecl>(GD.getDecl());
  if (!CGM.getTypes().isFuncTypeConvertible(
                                cast<FunctionType>(MD->getType().getTypePtr())))
    return;

  EmitThunk(GD, Thunk, /*UseAvailableExternallyLinkage=*/true);
}

void CodeGenVTables::EmitThunks(GlobalDecl GD)
{
  const CXXMethodDecl *MD =
    cast<CXXMethodDecl>(GD.getDecl())->getCanonicalDecl();

  // We don't need to generate thunks for the base destructor.
  if (isa<CXXDestructorDecl>(MD) && GD.getDtorType() == Dtor_Base)
    return;

  const VTableContext::ThunkInfoVectorTy *ThunkInfoVector =
    VTContext.getThunkInfo(MD);
  if (!ThunkInfoVector)
    return;

  for (unsigned I = 0, E = ThunkInfoVector->size(); I != E; ++I)
    EmitThunk(GD, (*ThunkInfoVector)[I],
              /*UseAvailableExternallyLinkage=*/false);
}

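// The initializer built below is a flat array of i8* constants, one per vtable
// component, in the order the VTableContext computed them: offset components
// become int-to-ptr casts, the RTTI slot points at the type_info, and each
// virtual function slot holds the function itself, a thunk, or the runtime's
// pure/deleted-virtual handler. For a simple
//
//   struct A { virtual void f(); virtual ~A(); };
//
// the complete-object vtable is roughly
//
//   [ offset-to-top (0), &typeinfo for A, &A::f,
//     &A::~A (complete), &A::~A (deleting) ]
//
// with the address point located just after the RTTI entry.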
llvm::Constant *
CodeGenVTables::CreateVTableInitializer(const CXXRecordDecl *RD,
                                        const VTableComponent *Components,
                                        unsigned NumComponents,
                                const VTableLayout::VTableThunkTy *VTableThunks,
                                        unsigned NumVTableThunks) {
  SmallVector<llvm::Constant *, 64> Inits;

  llvm::Type *Int8PtrTy = CGM.Int8PtrTy;

  llvm::Type *PtrDiffTy =
    CGM.getTypes().ConvertType(CGM.getContext().getPointerDiffType());

  QualType ClassType = CGM.getContext().getTagDeclType(RD);
  llvm::Constant *RTTI = CGM.GetAddrOfRTTIDescriptor(ClassType);

  unsigned NextVTableThunkIndex = 0;

  llvm::Constant *PureVirtualFn = 0, *DeletedVirtualFn = 0;

  for (unsigned I = 0; I != NumComponents; ++I) {
    VTableComponent Component = Components[I];

    llvm::Constant *Init = 0;

    switch (Component.getKind()) {
    case VTableComponent::CK_VCallOffset:
      Init = llvm::ConstantInt::get(PtrDiffTy,
                                    Component.getVCallOffset().getQuantity());
      Init = llvm::ConstantExpr::getIntToPtr(Init, Int8PtrTy);
      break;
    case VTableComponent::CK_VBaseOffset:
      Init = llvm::ConstantInt::get(PtrDiffTy,
                                    Component.getVBaseOffset().getQuantity());
      Init = llvm::ConstantExpr::getIntToPtr(Init, Int8PtrTy);
      break;
    case VTableComponent::CK_OffsetToTop:
      Init = llvm::ConstantInt::get(PtrDiffTy,
                                    Component.getOffsetToTop().getQuantity());
      Init = llvm::ConstantExpr::getIntToPtr(Init, Int8PtrTy);
      break;
    case VTableComponent::CK_RTTI:
      Init = llvm::ConstantExpr::getBitCast(RTTI, Int8PtrTy);
      break;
    case VTableComponent::CK_FunctionPointer:
    case VTableComponent::CK_CompleteDtorPointer:
    case VTableComponent::CK_DeletingDtorPointer: {
      GlobalDecl GD;

      // Get the right global decl.
      switch (Component.getKind()) {
      default:
        llvm_unreachable("Unexpected vtable component kind");
      case VTableComponent::CK_FunctionPointer:
        GD = Component.getFunctionDecl();
        break;
      case VTableComponent::CK_CompleteDtorPointer:
        GD = GlobalDecl(Component.getDestructorDecl(), Dtor_Complete);
        break;
      case VTableComponent::CK_DeletingDtorPointer:
        GD = GlobalDecl(Component.getDestructorDecl(), Dtor_Deleting);
        break;
      }

      if (cast<CXXMethodDecl>(GD.getDecl())->isPure()) {
        // We have a pure virtual member function.
        if (!PureVirtualFn) {
          llvm::FunctionType *Ty =
            llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
          StringRef PureCallName = CGM.getCXXABI().GetPureVirtualCallName();
          PureVirtualFn = CGM.CreateRuntimeFunction(Ty, PureCallName);
          PureVirtualFn = llvm::ConstantExpr::getBitCast(PureVirtualFn,
                                                         CGM.Int8PtrTy);
        }
        Init = PureVirtualFn;
      } else if (cast<CXXMethodDecl>(GD.getDecl())->isDeleted()) {
        if (!DeletedVirtualFn) {
          llvm::FunctionType *Ty =
            llvm::FunctionType::get(CGM.VoidTy, /*isVarArg=*/false);
          StringRef DeletedCallName =
            CGM.getCXXABI().GetDeletedVirtualCallName();
          DeletedVirtualFn = CGM.CreateRuntimeFunction(Ty, DeletedCallName);
          DeletedVirtualFn = llvm::ConstantExpr::getBitCast(DeletedVirtualFn,
                                                         CGM.Int8PtrTy);
        }
        Init = DeletedVirtualFn;
      } else {
        // Check if we should use a thunk.
        if (NextVTableThunkIndex < NumVTableThunks &&
            VTableThunks[NextVTableThunkIndex].first == I) {
          const ThunkInfo &Thunk = VTableThunks[NextVTableThunkIndex].second;

          MaybeEmitThunkAvailableExternally(GD, Thunk);
          Init = CGM.GetAddrOfThunk(GD, Thunk);

          NextVTableThunkIndex++;
        } else {
          llvm::Type *Ty = CGM.getTypes().GetFunctionTypeForVTable(GD);

          Init = CGM.GetAddrOfFunction(GD, Ty, /*ForVTable=*/true);
        }

        Init = llvm::ConstantExpr::getBitCast(Init, Int8PtrTy);
      }
      break;
    }

    case VTableComponent::CK_UnusedFunctionPointer:
      Init = llvm::ConstantExpr::getNullValue(Int8PtrTy);
      break;
    };

    Inits.push_back(Init);
  }

  llvm::ArrayType *ArrayType = llvm::ArrayType::get(Int8PtrTy, NumComponents);
  return llvm::ConstantArray::get(ArrayType, Inits);
}

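// In the Itanium mangling the vtable symbol is "_ZTV" followed by the class
// name (e.g. "_ZTV3Foo" for a class Foo at global scope); the mangler call
// below produces that name.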
llvm::GlobalVariable *CodeGenVTables::GetAddrOfVTable(const CXXRecordDecl *RD) {
  llvm::GlobalVariable *&VTable = VTables[RD];
  if (VTable)
    return VTable;

  // Queue up this v-table for possible deferred emission.
  CGM.addDeferredVTable(RD);

  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().mangleCXXVTable(RD, Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::ArrayType *ArrayType =
    llvm::ArrayType::get(CGM.Int8PtrTy,
                        VTContext.getVTableLayout(RD).getNumVTableComponents());

  VTable =
    CGM.CreateOrReplaceCXXRuntimeVariable(Name, ArrayType,
                                          llvm::GlobalValue::ExternalLinkage);
  VTable->setUnnamedAddr(true);
  return VTable;
}

void
CodeGenVTables::EmitVTableDefinition(llvm::GlobalVariable *VTable,
                                     llvm::GlobalVariable::LinkageTypes Linkage,
                                     const CXXRecordDecl *RD) {
  const VTableLayout &VTLayout = VTContext.getVTableLayout(RD);

  // Create and set the initializer.
  llvm::Constant *Init =
    CreateVTableInitializer(RD,
                            VTLayout.vtable_component_begin(),
                            VTLayout.getNumVTableComponents(),
                            VTLayout.vtable_thunk_begin(),
                            VTLayout.getNumVTableThunks());
  VTable->setInitializer(Init);

  // Set the correct linkage.
  VTable->setLinkage(Linkage);

  // Set the right visibility.
  CGM.setTypeVisibility(VTable, RD, CodeGenModule::TVK_ForVTable);
}

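// Construction vtables (the "_ZTC" symbols) are used while a base class
// subobject of a class with virtual bases is being constructed or destroyed:
// during that window virtual dispatch must see the base class's entries, but
// the virtual base offsets still have to describe the layout of the complete
// object. They are only reached indirectly, through the VTT.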
llvm::GlobalVariable *
CodeGenVTables::GenerateConstructionVTable(const CXXRecordDecl *RD,
                                      const BaseSubobject &Base,
                                      bool BaseIsVirtual,
                                   llvm::GlobalVariable::LinkageTypes Linkage,
                                      VTableAddressPointsMapTy& AddressPoints) {
  OwningPtr<VTableLayout> VTLayout(
    VTContext.createConstructionVTableLayout(Base.getBase(),
                                             Base.getBaseOffset(),
                                             BaseIsVirtual, RD));

  // Add the address points.
  AddressPoints = VTLayout->getAddressPoints();

  // Get the mangled construction vtable name.
  SmallString<256> OutName;
  llvm::raw_svector_ostream Out(OutName);
  CGM.getCXXABI().getMangleContext().
    mangleCXXCtorVTable(RD, Base.getBaseOffset().getQuantity(), Base.getBase(),
                        Out);
  Out.flush();
  StringRef Name = OutName.str();

  llvm::ArrayType *ArrayType =
    llvm::ArrayType::get(CGM.Int8PtrTy, VTLayout->getNumVTableComponents());

  // Construction vtable symbols are not part of the Itanium ABI, so we cannot
  // guarantee that they actually will be available externally. Instead, when
  // emitting an available_externally VTT, we provide references to an internal
  // linkage construction vtable. The ABI only requires complete-object vtables
  // to be the same for all instances of a type, not construction vtables.
  if (Linkage == llvm::GlobalVariable::AvailableExternallyLinkage)
    Linkage = llvm::GlobalVariable::InternalLinkage;

  // Create the variable that will hold the construction vtable.
  llvm::GlobalVariable *VTable =
    CGM.CreateOrReplaceCXXRuntimeVariable(Name, ArrayType, Linkage);
  CGM.setTypeVisibility(VTable, RD, CodeGenModule::TVK_ForConstructionVTable);

  // V-tables are always unnamed_addr.
  VTable->setUnnamedAddr(true);

  // Create and set the initializer.
  llvm::Constant *Init =
    CreateVTableInitializer(Base.getBase(),
                            VTLayout->vtable_component_begin(),
                            VTLayout->getNumVTableComponents(),
                            VTLayout->vtable_thunk_begin(),
                            VTLayout->getNumVTableThunks());
  VTable->setInitializer(Init);

  return VTable;
}

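// Under the Itanium ABI, a class with a key function (its first non-pure,
// non-inline virtual member function) gets its vtable emitted only in the
// translation unit that defines that key function. For a hypothetical
//
//   struct S { virtual void f(); virtual ~S(); };
//
// the TU defining S::f owns the strong vtable definition; with optimization
// enabled, other TUs may still get an available_externally copy, which is
// what the logic below computes.
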
/// Compute the required linkage of the v-table for the given class.
///
/// Note that we only call this at the end of the translation unit.
llvm::GlobalVariable::LinkageTypes
CodeGenModule::getVTableLinkage(const CXXRecordDecl *RD) {
  if (RD->getLinkage() != ExternalLinkage)
    return llvm::GlobalVariable::InternalLinkage;

  // We're at the end of the translation unit, so the current key
  // function is fully correct.
  if (const CXXMethodDecl *keyFunction = Context.getCurrentKeyFunction(RD)) {
    // If this class has a key function, use that to determine the
    // linkage of the vtable.
    const FunctionDecl *def = 0;
    if (keyFunction->hasBody(def))
      keyFunction = cast<CXXMethodDecl>(def);

    switch (keyFunction->getTemplateSpecializationKind()) {
      case TSK_Undeclared:
      case TSK_ExplicitSpecialization:
        // When compiling with optimizations turned on, we emit all vtables,
        // even if the key function is not defined in the current translation
        // unit. If this is the case, use available_externally linkage.
        if (!def && CodeGenOpts.OptimizationLevel)
          return llvm::GlobalVariable::AvailableExternallyLinkage;

        if (keyFunction->isInlined())
          return !Context.getLangOpts().AppleKext ?
                   llvm::GlobalVariable::LinkOnceODRLinkage :
                   llvm::Function::InternalLinkage;

        return llvm::GlobalVariable::ExternalLinkage;

      case TSK_ImplicitInstantiation:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::LinkOnceODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDefinition:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::WeakODRLinkage :
                 llvm::Function::InternalLinkage;

      case TSK_ExplicitInstantiationDeclaration:
        return !Context.getLangOpts().AppleKext ?
                 llvm::GlobalVariable::AvailableExternallyLinkage :
                 llvm::Function::InternalLinkage;
    }
  }

  // -fapple-kext mode does not support weak linkage, so we must use
  // internal linkage.
  if (Context.getLangOpts().AppleKext)
    return llvm::Function::InternalLinkage;

  switch (RD->getTemplateSpecializationKind()) {
  case TSK_Undeclared:
  case TSK_ExplicitSpecialization:
  case TSK_ImplicitInstantiation:
    return llvm::GlobalVariable::LinkOnceODRLinkage;

  case TSK_ExplicitInstantiationDeclaration:
    return llvm::GlobalVariable::AvailableExternallyLinkage;

  case TSK_ExplicitInstantiationDefinition:
    return llvm::GlobalVariable::WeakODRLinkage;
  }

  llvm_unreachable("Invalid TemplateSpecializationKind!");
}

/// This is a callback from Sema to tell us that it believes that a
/// particular v-table is required to be emitted in this translation
/// unit.
///
/// The reason we don't simply trust this callback is because Sema
/// will happily report that something is used even when it's used
/// only in code that we don't actually have to emit.
///
/// \param isRequired - if true, the v-table is mandatory, e.g.
///   because the translation unit defines the key function
void CodeGenModule::EmitVTable(CXXRecordDecl *theClass, bool isRequired) {
  if (!isRequired) return;

  VTables.GenerateClassData(theClass);
}

void
CodeGenVTables::GenerateClassData(const CXXRecordDecl *RD) {
  // First off, check whether we've already emitted the v-table and
  // associated stuff.
  llvm::GlobalVariable *VTable = GetAddrOfVTable(RD);
  if (VTable->hasInitializer())
    return;

  llvm::GlobalVariable::LinkageTypes Linkage = CGM.getVTableLinkage(RD);
  EmitVTableDefinition(VTable, Linkage, RD);

  if (RD->getNumVBases()) {
    if (!CGM.getTarget().getCXXABI().isMicrosoft()) {
      llvm::GlobalVariable *VTT = GetAddrOfVTT(RD);
      EmitVTTDefinition(VTT, Linkage, RD);
    } else {
      // FIXME: Emit vbtables here.
    }
  }

  // If this is the magic class __cxxabiv1::__fundamental_type_info,
  // we will emit the typeinfo for the fundamental types. This is the
  // same behaviour as GCC.
  const DeclContext *DC = RD->getDeclContext();
  if (RD->getIdentifier() &&
      RD->getIdentifier()->isStr("__fundamental_type_info") &&
      isa<NamespaceDecl>(DC) &&
      cast<NamespaceDecl>(DC)->getIdentifier() &&
      cast<NamespaceDecl>(DC)->getIdentifier()->isStr("__cxxabiv1") &&
      DC->getParent()->isTranslationUnit())
    CGM.EmitFundamentalRTTIDescriptors();
}

/// At this point in the translation unit, does it appear that we can
/// rely on the vtable being defined elsewhere in the program?
///
/// The response is really only definitive when called at the end of
/// the translation unit.
///
/// The only semantic restriction here is that the object file should
/// not contain a v-table definition when that v-table is defined
/// strongly elsewhere.  Otherwise, we'd just like to avoid emitting
/// v-tables when unnecessary.
bool CodeGenVTables::isVTableExternal(const CXXRecordDecl *RD) {
  assert(RD->isDynamicClass() && "Non dynamic classes have no VTable.");

  // If we have an explicit instantiation declaration (and not a
  // definition), the v-table is defined elsewhere.
  TemplateSpecializationKind TSK = RD->getTemplateSpecializationKind();
  if (TSK == TSK_ExplicitInstantiationDeclaration)
    return true;

  // Otherwise, if the class is an instantiated template, the
  // v-table must be defined here.
  if (TSK == TSK_ImplicitInstantiation ||
      TSK == TSK_ExplicitInstantiationDefinition)
    return false;

  // Otherwise, if the class doesn't have a key function (possibly
  // anymore), the v-table must be defined here.
  const CXXMethodDecl *keyFunction = CGM.getContext().getCurrentKeyFunction(RD);
  if (!keyFunction)
    return false;

  // Otherwise, if we don't have a definition of the key function, the
  // v-table must be defined somewhere else.
  return !keyFunction->hasBody();
}

/// Given that we're currently at the end of the translation unit, and
/// we've emitted a reference to the v-table for this class, should
/// we define that v-table?
static bool shouldEmitVTableAtEndOfTranslationUnit(CodeGenModule &CGM,
                                                   const CXXRecordDecl *RD) {
  // If we're building with optimization, we always emit v-tables
  // since that allows for virtual function calls to be devirtualized.
  // If the v-table is defined strongly elsewhere, this definition
  // will be emitted available_externally.
  //
  // However, we don't want to do this in -fapple-kext mode, because
  // kext mode does not permit devirtualization.
  if (CGM.getCodeGenOpts().OptimizationLevel && !CGM.getLangOpts().AppleKext)
    return true;

  return !CGM.getVTables().isVTableExternal(RD);
}

/// Given that at some point we emitted a reference to one or more
/// v-tables, and that we are now at the end of the translation unit,
/// decide whether we should emit them.
void CodeGenModule::EmitDeferredVTables() {
#ifndef NDEBUG
  // Remember the size of DeferredVTables, because we're going to assume
  // that this entire operation doesn't modify it.
  size_t savedSize = DeferredVTables.size();
#endif

  typedef std::vector<const CXXRecordDecl *>::const_iterator const_iterator;
  for (const_iterator i = DeferredVTables.begin(),
                      e = DeferredVTables.end(); i != e; ++i) {
    const CXXRecordDecl *RD = *i;
    if (shouldEmitVTableAtEndOfTranslationUnit(*this, RD))
      VTables.GenerateClassData(RD);
  }

  assert(savedSize == DeferredVTables.size() &&
         "deferred extra v-tables during v-table emission?");
  DeferredVTables.clear();
}