ARMTargetStreamer.cpp revision 360784
//===- ARMTargetStreamer.cpp - ARMTargetStreamer class --*- C++ -*---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the ARMTargetStreamer class.
//
//===----------------------------------------------------------------------===//

13#include "MCTargetDesc/ARMMCTargetDesc.h"
14#include "llvm/MC/ConstantPools.h"
15#include "llvm/MC/MCAsmInfo.h"
16#include "llvm/MC/MCContext.h"
17#include "llvm/MC/MCExpr.h"
18#include "llvm/MC/MCStreamer.h"
19#include "llvm/MC/MCSubtargetInfo.h"
20#include "llvm/Support/ARMBuildAttributes.h"
21#include "llvm/Support/TargetParser.h"
22
23using namespace llvm;
24
//
// ARMTargetStreamer Implementation
//

ARMTargetStreamer::ARMTargetStreamer(MCStreamer &S)
    : MCTargetStreamer(S), ConstantPools(new AssemblerConstantPools()) {}

ARMTargetStreamer::~ARMTargetStreamer() = default;

// The constant pool handling is shared by all ARMTargetStreamer
// implementations.
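// Entries are word-sized: the pool backs the "ldr rN, =expr" pseudo
// instruction, which loads a 32-bit literal, hence the fixed size of 4 below.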
const MCExpr *ARMTargetStreamer::addConstantPoolEntry(const MCExpr *Expr,
                                                      SMLoc Loc) {
  return ConstantPools->addEntry(Streamer, Expr, 4, Loc);
}

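// Flush the literal pool for the current section. In the assembly parser this
// is what the .ltorg directive ends up calling.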
void ARMTargetStreamer::emitCurrentConstantPool() {
  ConstantPools->emitForCurrentSection(Streamer);
  ConstantPools->clearCacheForCurrentSection(Streamer);
}

// finish() - write out any non-empty assembler constant pools.
void ARMTargetStreamer::finish() { ConstantPools->emitAll(Streamer); }

// reset() - Reset any state.
void ARMTargetStreamer::reset() {}

void ARMTargetStreamer::emitInst(uint32_t Inst, char Suffix) {
  unsigned Size;
  char Buffer[4];
  const bool LittleEndian =
      getStreamer().getContext().getAsmInfo()->isLittleEndian();

  switch (Suffix) {
  case '\0':
    Size = 4;

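    // A plain 32-bit ARM instruction: place the bytes so that they reach the
    // stream in the target's endianness (least significant byte first when
    // little-endian).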
    for (unsigned II = 0, IE = Size; II != IE; II++) {
      const unsigned I = LittleEndian ? (Size - II - 1) : II;
      Buffer[Size - II - 1] = uint8_t(Inst >> I * CHAR_BIT);
    }

    break;
  case 'n':
  case 'w':
    Size = (Suffix == 'n' ? 2 : 4);

    // Thumb wide instructions are emitted as a pair of 16-bit words of the
    // appropriate endianness.
    for (unsigned II = 0, IE = Size; II != IE; II = II + 2) {
      const unsigned I0 = LittleEndian ? II + 0 : II + 1;
      const unsigned I1 = LittleEndian ? II + 1 : II + 0;
      Buffer[Size - II - 2] = uint8_t(Inst >> I0 * CHAR_BIT);
      Buffer[Size - II - 1] = uint8_t(Inst >> I1 * CHAR_BIT);
    }

    break;
  default:
    llvm_unreachable("Invalid Suffix");
  }
  getStreamer().EmitBytes(StringRef(Buffer, Size));
}
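// Worked example (illustration, not from the original source): on a
// little-endian Thumb target, emitInst(0xF3AF8000, 'w') stores the halfwords
// 0xF3AF and 0x8000 in program order, each little-endian within itself, so
// the bytes AF F3 00 80 reach the output stream.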

// The remaining callbacks should be handled separately by each
// streamer.
void ARMTargetStreamer::emitFnStart() {}
void ARMTargetStreamer::emitFnEnd() {}
void ARMTargetStreamer::emitCantUnwind() {}
void ARMTargetStreamer::emitPersonality(const MCSymbol *Personality) {}
void ARMTargetStreamer::emitPersonalityIndex(unsigned Index) {}
void ARMTargetStreamer::emitHandlerData() {}
void ARMTargetStreamer::emitSetFP(unsigned FpReg, unsigned SpReg,
                                  int64_t Offset) {}
void ARMTargetStreamer::emitMovSP(unsigned Reg, int64_t Offset) {}
void ARMTargetStreamer::emitPad(int64_t Offset) {}
void ARMTargetStreamer::emitRegSave(const SmallVectorImpl<unsigned> &RegList,
                                    bool isVector) {}
void ARMTargetStreamer::emitUnwindRaw(int64_t StackOffset,
                                      const SmallVectorImpl<uint8_t> &Opcodes) {
}
void ARMTargetStreamer::switchVendor(StringRef Vendor) {}
void ARMTargetStreamer::emitAttribute(unsigned Attribute, unsigned Value) {}
void ARMTargetStreamer::emitTextAttribute(unsigned Attribute,
                                          StringRef String) {}
void ARMTargetStreamer::emitIntTextAttribute(unsigned Attribute,
                                             unsigned IntValue,
                                             StringRef StringValue) {}
void ARMTargetStreamer::emitArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitArchExtension(unsigned ArchExt) {}
void ARMTargetStreamer::emitObjectArch(ARM::ArchKind Arch) {}
void ARMTargetStreamer::emitFPU(unsigned FPU) {}
void ARMTargetStreamer::finishAttributeSection() {}
void
ARMTargetStreamer::AnnotateTLSDescriptorSequence(const MCSymbolRefExpr *SRE) {}
void ARMTargetStreamer::emitThumbSet(MCSymbol *Symbol, const MCExpr *Value) {}

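// Map the subtarget's feature bits onto a Tag_CPU_arch value. The checks run
// from newest to oldest architecture so the most specific match wins (a v8-A
// core, for example, also has HasV7Ops set through feature implication).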
static ARMBuildAttrs::CPUArch getArchForCPU(const MCSubtargetInfo &STI) {
  if (STI.getCPU() == "xscale")
    return ARMBuildAttrs::v5TEJ;

  if (STI.hasFeature(ARM::HasV8Ops)) {
    if (STI.hasFeature(ARM::FeatureRClass))
      return ARMBuildAttrs::v8_R;
    return ARMBuildAttrs::v8_A;
  } else if (STI.hasFeature(ARM::HasV8_1MMainlineOps))
    return ARMBuildAttrs::v8_1_M_Main;
  else if (STI.hasFeature(ARM::HasV8MMainlineOps))
    return ARMBuildAttrs::v8_M_Main;
  else if (STI.hasFeature(ARM::HasV7Ops)) {
    if (STI.hasFeature(ARM::FeatureMClass) && STI.hasFeature(ARM::FeatureDSP))
      return ARMBuildAttrs::v7E_M;
    return ARMBuildAttrs::v7;
  } else if (STI.hasFeature(ARM::HasV6T2Ops))
    return ARMBuildAttrs::v6T2;
  else if (STI.hasFeature(ARM::HasV8MBaselineOps))
    return ARMBuildAttrs::v8_M_Base;
  else if (STI.hasFeature(ARM::HasV6MOps))
    return ARMBuildAttrs::v6S_M;
  else if (STI.hasFeature(ARM::HasV6Ops))
    return ARMBuildAttrs::v6;
  else if (STI.hasFeature(ARM::HasV5TEOps))
    return ARMBuildAttrs::v5TE;
  else if (STI.hasFeature(ARM::HasV5TOps))
    return ARMBuildAttrs::v5T;
  else if (STI.hasFeature(ARM::HasV4TOps))
    return ARMBuildAttrs::v4T;
  else
    return ARMBuildAttrs::v4;
}

static bool isV8M(const MCSubtargetInfo &STI) {
  // Note that v8M Baseline is a subset of v6T2: any core with HasV6T2Ops also
  // reports HasV8MBaselineOps, so v6T2 has to be excluded explicitly here.
  return (STI.hasFeature(ARM::HasV8MBaselineOps) &&
          !STI.hasFeature(ARM::HasV6T2Ops)) ||
         STI.hasFeature(ARM::HasV8MMainlineOps);
}

/// Emit the build attributes that only depend on the hardware that we expect
/// to be available, and not on the ABI, or any source-language choices.
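///
/// For illustration (hypothetical output, not part of the original source):
/// for a cortex-m4 subtarget, the assembly streamer would print, among other
/// directives:
///   .cpu cortex-m4
///   .eabi_attribute 6, 13 @ Tag_CPU_arch = v7E-M
///   .eabi_attribute 7, 77 @ Tag_CPU_arch_profile = 'M'
///   .fpu fpv4-sp-d16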
void ARMTargetStreamer::emitTargetAttributes(const MCSubtargetInfo &STI) {
  switchVendor("aeabi");

  const StringRef CPUString = STI.getCPU();
  if (!CPUString.empty() && !CPUString.startswith("generic")) {
    // FIXME: remove the krait check when GNU tools support the krait CPU.
    if (STI.hasFeature(ARM::ProcKrait)) {
      emitTextAttribute(ARMBuildAttrs::CPU_name, "cortex-a9");
      // We treat krait as a "cortex-a9" + hwdiv CPU, enabling hwdiv through
      // ".arch_extension idiv".
      if (STI.hasFeature(ARM::FeatureHWDivThumb) ||
          STI.hasFeature(ARM::FeatureHWDivARM))
        emitArchExtension(ARM::AEK_HWDIVTHUMB | ARM::AEK_HWDIVARM);
    } else {
      emitTextAttribute(ARMBuildAttrs::CPU_name, CPUString);
    }
  }

  emitAttribute(ARMBuildAttrs::CPU_arch, getArchForCPU(STI));

  if (STI.hasFeature(ARM::FeatureAClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::ApplicationProfile);
  } else if (STI.hasFeature(ARM::FeatureRClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::RealTimeProfile);
  } else if (STI.hasFeature(ARM::FeatureMClass)) {
    emitAttribute(ARMBuildAttrs::CPU_arch_profile,
                  ARMBuildAttrs::MicroControllerProfile);
  }

  emitAttribute(ARMBuildAttrs::ARM_ISA_use, STI.hasFeature(ARM::FeatureNoARM)
                                                ? ARMBuildAttrs::Not_Allowed
                                                : ARMBuildAttrs::Allowed);

  if (isV8M(STI)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use,
                  ARMBuildAttrs::AllowThumbDerived);
  } else if (STI.hasFeature(ARM::FeatureThumb2)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::AllowThumb32);
  } else if (STI.hasFeature(ARM::HasV4TOps)) {
    emitAttribute(ARMBuildAttrs::THUMB_ISA_use, ARMBuildAttrs::Allowed);
  }

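  // Pick the .fpu name from the FP/NEON feature bits, checking the newest
  // extensions first so the most capable FPU the subtarget supports is the
  // one that gets emitted.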
  if (STI.hasFeature(ARM::FeatureNEON)) {
    /* NEON is not exactly a VFP architecture, but GAS emits one of
     * neon/neon-fp-armv8/neon-vfpv4/vfpv3/vfpv2 for .fpu parameters */
    if (STI.hasFeature(ARM::FeatureFPARMv8)) {
      if (STI.hasFeature(ARM::FeatureCrypto))
        emitFPU(ARM::FK_CRYPTO_NEON_FP_ARMV8);
      else
        emitFPU(ARM::FK_NEON_FP_ARMV8);
    } else if (STI.hasFeature(ARM::FeatureVFP4))
      emitFPU(ARM::FK_NEON_VFPV4);
    else
      emitFPU(STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_NEON_FP16
                                               : ARM::FK_NEON);
    // Emit Tag_Advanced_SIMD_arch for ARMv8 architecture
    if (STI.hasFeature(ARM::HasV8Ops))
      emitAttribute(ARMBuildAttrs::Advanced_SIMD_arch,
                    STI.hasFeature(ARM::HasV8_1aOps)
                        ? ARMBuildAttrs::AllowNeonARMv8_1a
                        : ARMBuildAttrs::AllowNeonARMv8);
  } else {
    if (STI.hasFeature(ARM::FeatureFPARMv8_D16_SP))
      // FPv5 and FP-ARMv8 have the same instructions, so are modeled as one
      // FPU, but there are two different names for it depending on the CPU.
      emitFPU(STI.hasFeature(ARM::FeatureD32)
                  ? ARM::FK_FP_ARMV8
                  : (STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_FPV5_D16
                                                      : ARM::FK_FPV5_SP_D16));
    else if (STI.hasFeature(ARM::FeatureVFP4_D16_SP))
      emitFPU(STI.hasFeature(ARM::FeatureD32)
                  ? ARM::FK_VFPV4
                  : (STI.hasFeature(ARM::FeatureFP64) ? ARM::FK_VFPV4_D16
                                                      : ARM::FK_FPV4_SP_D16));
    else if (STI.hasFeature(ARM::FeatureVFP3_D16_SP))
      emitFPU(
          STI.hasFeature(ARM::FeatureD32)
              // +d32
              ? (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3_FP16
                                                  : ARM::FK_VFPV3)
              // -d32
              : (STI.hasFeature(ARM::FeatureFP64)
                     ? (STI.hasFeature(ARM::FeatureFP16)
                            ? ARM::FK_VFPV3_D16_FP16
                            : ARM::FK_VFPV3_D16)
                     : (STI.hasFeature(ARM::FeatureFP16) ? ARM::FK_VFPV3XD_FP16
                                                         : ARM::FK_VFPV3XD)));
    else if (STI.hasFeature(ARM::FeatureVFP2_SP))
      emitFPU(ARM::FK_VFPV2);
  }

  // Emit the ABI_HardFP_use attribute to indicate that only single-precision
  // FP hardware is available.
  if (STI.hasFeature(ARM::FeatureVFP2_SP) && !STI.hasFeature(ARM::FeatureFP64))
    emitAttribute(ARMBuildAttrs::ABI_HardFP_use,
                  ARMBuildAttrs::HardFPSinglePrecision);

  if (STI.hasFeature(ARM::FeatureFP16))
    emitAttribute(ARMBuildAttrs::FP_HP_extension, ARMBuildAttrs::AllowHPFP);

  if (STI.hasFeature(ARM::FeatureMP))
    emitAttribute(ARMBuildAttrs::MPextension_use, ARMBuildAttrs::AllowMP);

  if (STI.hasFeature(ARM::HasMVEFloatOps))
    emitAttribute(ARMBuildAttrs::MVE_arch,
                  ARMBuildAttrs::AllowMVEIntegerAndFloat);
  else if (STI.hasFeature(ARM::HasMVEIntegerOps))
    emitAttribute(ARMBuildAttrs::MVE_arch, ARMBuildAttrs::AllowMVEInteger);

  // Hardware divide in ARM mode is part of base arch, starting from ARMv8.
  // If only Thumb hwdiv is present, it must also be in base arch (ARMv7-R/M).
  // It is not possible to produce DisallowDIV: if hwdiv is present in the base
  // arch, supplying -hwdiv downgrades the effective arch, via ClearImpliedBits.
  // AllowDIVExt is only emitted if hwdiv isn't available in the base arch;
  // otherwise, the default value (AllowDIVIfExists) applies.
  if (STI.hasFeature(ARM::FeatureHWDivARM) && !STI.hasFeature(ARM::HasV8Ops))
    emitAttribute(ARMBuildAttrs::DIV_use, ARMBuildAttrs::AllowDIVExt);

  if (STI.hasFeature(ARM::FeatureDSP) && isV8M(STI))
    emitAttribute(ARMBuildAttrs::DSP_extension, ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureStrictAlign))
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Not_Allowed);
  else
    emitAttribute(ARMBuildAttrs::CPU_unaligned_access,
                  ARMBuildAttrs::Allowed);

  if (STI.hasFeature(ARM::FeatureTrustZone) &&
      STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowTZVirtualization);
  else if (STI.hasFeature(ARM::FeatureTrustZone))
    emitAttribute(ARMBuildAttrs::Virtualization_use, ARMBuildAttrs::AllowTZ);
  else if (STI.hasFeature(ARM::FeatureVirtualization))
    emitAttribute(ARMBuildAttrs::Virtualization_use,
                  ARMBuildAttrs::AllowVirtualization);
}