using namespace CodeGen;

struct CGRecordLowering {
  MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
  MemberInfo(CharUnits Offset, InfoKind Kind, llvm::Type *Data,
  bool operator <(const MemberInfo &a) const { return Offset < a.Offset; }
  static MemberInfo StorageInfo(CharUnits Offset, llvm::Type *Data) {
    return MemberInfo(Offset, MemberInfo::Field, Data);
  bool isDiscreteBitFieldABI() const {
    return Context.getTargetInfo().getCXXABI().isMicrosoft() ||
           D->isMsStruct(Context);
  bool isOverlappingVBaseABI() const {
  llvm::Type *getIntNType(uint64_t NumBits) const {
    unsigned AlignedBits = llvm::alignTo(NumBits, Context.getCharWidth());
    return llvm::Type::getIntNTy(Types.getLLVMContext(), AlignedBits);
  llvm::Type *getCharType() const {
    return llvm::Type::getIntNTy(Types.getLLVMContext(),
                                 Context.getCharWidth());
  llvm::Type *getByteArrayType(CharUnits NumChars) const {
    assert(!NumChars.isZero() && "Empty byte arrays aren't allowed.");
    llvm::Type *Type = getCharType();
  llvm::Type *getStorageType(const FieldDecl *FD) const {
    llvm::Type *Type = Types.ConvertTypeForMem(FD->getType());
    if (isDiscreteBitFieldABI()) return Type;
    return getIntNType(std::min(FD->getBitWidthValue(),
                                (unsigned)Context.toBits(getSize(Type))));

    return Types.getCGRecordLayout(RD).getBaseSubobjectLLVMType();
  CharUnits bitsToCharUnits(uint64_t BitOffset) const {
  bool isZeroInitializable(const FieldDecl *FD) const {
    return Types.isZeroInitializable(FD->getType());
  bool isZeroInitializable(const RecordDecl *RD) const {
    return Types.isZeroInitializable(RD);
  void appendPaddingBytes(CharUnits Size) {
    if (!Size.isZero())
      FieldTypes.push_back(getByteArrayType(Size));
  void setBitFieldInfo(const FieldDecl *FD, CharUnits StartOffset,
                       llvm::Type *StorageType);
  void lower(bool NonVirtualBaseType);
  void lowerUnion(bool isNonVirtualBaseType);
  void accumulateFields(bool isNonVirtualBaseType);
  accumulateBitFields(bool isNonVirtualBaseType,
  void computeVolatileBitfields();
  void accumulateBases();
  void accumulateVPtrs();
  void accumulateVBases();
  void calculateZeroInit();
  CharUnits calculateTailClippingOffset(bool isNonVirtualBaseType) const;
  void checkBitfieldClipping(bool isNonVirtualBaseType) const;
  void determinePacked(bool NVBaseType);
  void insertPadding();
  void fillOutputFields();
  const llvm::DataLayout &DataLayout;
  std::vector<MemberInfo> Members;
  llvm::DenseMap<const FieldDecl *, unsigned> Fields;
  llvm::DenseMap<const FieldDecl *, CGBitFieldInfo> BitFields;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> NonVirtualBases;
  llvm::DenseMap<const CXXRecordDecl *, unsigned> VirtualBases;
  bool IsZeroInitializable : 1;
  bool IsZeroInitializableAsBase : 1;
  CGRecordLowering(const CGRecordLowering &) = delete;
  void operator =(const CGRecordLowering &) = delete;
    : Types(Types), Context(Types.getContext()), D(D),
      Layout(Types.getContext().getASTRecordLayout(D)),
      DataLayout(Types.getDataLayout()), IsZeroInitializable(true),
      IsZeroInitializableAsBase(true), Packed(Packed) {}
void CGRecordLowering::setBitFieldInfo(
  if (DataLayout.isBigEndian())

void CGRecordLowering::lower(bool NVBaseType) {
  CharUnits Size = NVBaseType ? Layout.getNonVirtualSize() : Layout.getSize();
  if (D->isUnion()) {
    lowerUnion(NVBaseType);
    computeVolatileBitfields();
  accumulateFields(NVBaseType);
  if (Members.empty()) {
    appendPaddingBytes(Size);
    computeVolatileBitfields();
  llvm::stable_sort(Members);
  checkBitfieldClipping(NVBaseType);
  Members.push_back(StorageInfo(Size, getIntNType(8)));
  determinePacked(NVBaseType);
  computeVolatileBitfields();
void CGRecordLowering::lowerUnion(bool isNonVirtualBaseType) {
  CharUnits LayoutSize =
      isNonVirtualBaseType ? Layout.getDataSize() : Layout.getSize();
  llvm::Type *StorageType = nullptr;
  bool SeenNamedMember = false;
  for (const auto *Field : D->fields()) {
    if (Field->isBitField()) {
      if (Field->isZeroLengthBitField())
      llvm::Type *FieldType = getStorageType(Field);
      if (LayoutSize < getSize(FieldType))
        FieldType = getByteArrayType(LayoutSize);
    Fields[Field->getCanonicalDecl()] = 0;
    llvm::Type *FieldType = getStorageType(Field);
    if (!SeenNamedMember) {
      SeenNamedMember = Field->getIdentifier();
      if (!SeenNamedMember)
        if (const auto *FieldRD = Field->getType()->getAsRecordDecl())
          SeenNamedMember = FieldRD->findFirstNamedDataMember();
      if (SeenNamedMember && !isZeroInitializable(Field)) {
        IsZeroInitializable = IsZeroInitializableAsBase = false;
        StorageType = FieldType;
    if (!IsZeroInitializable)
        getAlignment(FieldType) > getAlignment(StorageType) ||
        (getAlignment(FieldType) == getAlignment(StorageType) &&
         getSize(FieldType) > getSize(StorageType)))
      StorageType = FieldType;
    return appendPaddingBytes(LayoutSize);
  if (LayoutSize < getSize(StorageType))
    StorageType = getByteArrayType(LayoutSize);
  FieldTypes.push_back(StorageType);
  appendPaddingBytes(LayoutSize - getSize(StorageType));
  const auto StorageAlignment = getAlignment(StorageType);
  assert((Layout.getSize() % StorageAlignment == 0 ||
          Layout.getDataSize() % StorageAlignment) &&
         "Union's standard layout and no_unique_address layout must agree on "
  if (Layout.getDataSize() % StorageAlignment)
void CGRecordLowering::accumulateFields(bool isNonVirtualBaseType) {
  for (RecordDecl::field_iterator Field = D->field_begin(),
                                  FieldEnd = D->field_end();
       Field != FieldEnd;) {
    if (Field->isBitField()) {
      Field = accumulateBitFields(isNonVirtualBaseType, Field, FieldEnd);
      assert((Field == FieldEnd || !Field->isBitField()) &&
             "Failed to accumulate all the bitfields");

      Members.push_back(MemberInfo(
          bitsToCharUnits(getFieldBitOffset(*Field)), MemberInfo::Field,
          Field->isPotentiallyOverlapping()
              ? getStorageType(Field->getType()->getAsCXXRecordDecl())
              : getStorageType(*Field),
CGRecordLowering::accumulateBitFields(bool isNonVirtualBaseType,
  if (isDiscreteBitFieldABI()) {
      if (Field->isZeroLengthBitField()) {
      uint64_t BitOffset = getFieldBitOffset(*Field);
      llvm::Type *Type = Types.ConvertTypeForMem(Field->getType());
      if (Run == FieldEnd || BitOffset >= Tail) {
        StartBitOffset = BitOffset;
        Tail = StartBitOffset + DataLayout.getTypeAllocSizeInBits(Type);
        Members.push_back(StorageInfo(bitsToCharUnits(StartBitOffset), Type));
      Members.push_back(MemberInfo(bitsToCharUnits(StartBitOffset),
                                   MemberInfo::Field, nullptr, *Field));
    bool AtAlignedBoundary = false;
    bool Barrier = false;
    if (Field != FieldEnd && Field->isBitField()) {
      uint64_t BitOffset = getFieldBitOffset(*Field);
      if (Begin == FieldEnd) {
        assert((BitOffset % CharBits) == 0 && "Not at start of char");
        BeginOffset = bitsToCharUnits(BitOffset);
        BitSizeSinceBegin = 0;
      } else if ((BitOffset % CharBits) != 0) {
        assert(BitOffset == Context.toBits(BeginOffset) + BitSizeSinceBegin &&
               "Concatenating non-contiguous bitfields");
        if (Field->isZeroLengthBitField())
          Barrier = true;
        AtAlignedBoundary = true;
    } else {
      if (Begin == FieldEnd)
        break;
      Barrier = true;
      AtAlignedBoundary = true;
    bool InstallBest = false;
    if (AtAlignedBoundary) {
      CharUnits AccessSize = bitsToCharUnits(BitSizeSinceBegin + CharBits - 1);
      if (BestEnd == Begin) {
        BestEndOffset = BeginOffset + AccessSize;
        if (!BitSizeSinceBegin)
      } else if (AccessSize > RegSize)
        llvm::Type *Type = getIntNType(Context.toBits(AccessSize));
        if (Align > Layout.getAlignment())
      if (InstallBest && BestEnd == Field)
        if (getSize(Type) == AccessSize)
          BestClipped = false;
        for (auto Probe = Field; Probe != FieldEnd; ++Probe)
            assert((getFieldBitOffset(*Probe) % CharBits) == 0 &&
                   "Next storage is not byte-aligned");
            LimitOffset = bitsToCharUnits(getFieldBitOffset(*Probe));
        if (ScissorOffset.isZero()) {
          ScissorOffset = calculateTailClippingOffset(isNonVirtualBaseType);
          assert(!ScissorOffset.isZero() && "Tail clipping at zero");
        LimitOffset = ScissorOffset;
      if (BeginOffset + TypeSize <= LimitOffset) {
        BestEndOffset = BeginOffset + TypeSize;
        BestClipped = false;
      else if (Types.getCodeGenOpts().FineGrainedBitfieldAccesses)
        BitSizeSinceBegin = Context.toBits(LimitOffset - BeginOffset);
      assert((Field == FieldEnd || !Field->isBitField() ||
              (getFieldBitOffset(*Field) % CharBits) == 0) &&
             "Installing but not at an aligned bitfield or limit");
      CharUnits AccessSize = BestEndOffset - BeginOffset;
      if (!AccessSize.isZero()) {
          assert(getSize(getIntNType(Context.toBits(AccessSize))) >
                     AccessSize &&
                 "Clipped access need not be clipped");
          Type = getByteArrayType(AccessSize);
          Type = getIntNType(Context.toBits(AccessSize));
          assert(getSize(Type) == AccessSize &&
                 "Unclipped access must be clipped");
        Members.push_back(StorageInfo(BeginOffset, Type));
        if (!Begin->isZeroLengthBitField())
          Members.push_back(
              MemberInfo(BeginOffset, MemberInfo::Field, nullptr, *Begin));
      assert(Field != FieldEnd && Field->isBitField() &&
             "Accumulating past end of bitfields");
      assert(!Barrier && "Accumulating across barrier");
      BitSizeSinceBegin += Field->getBitWidthValue();
void CGRecordLowering::accumulateBases() {
  if (Layout.isPrimaryBaseVirtual()) {
                                 getStorageType(BaseDecl), BaseDecl));
  for (const auto &Base : RD->bases()) {
    if (Base.isVirtual())
    Members.push_back(MemberInfo(Layout.getBaseClassOffset(BaseDecl),
                                 MemberInfo::Base, getStorageType(BaseDecl), BaseDecl));
void CGRecordLowering::computeVolatileBitfields() {
  if (!isAAPCS() || !Types.getCodeGenOpts().AAPCSBitfieldWidth)
  for (auto &I : BitFields) {
    llvm::Type *ResLTy = Types.ConvertTypeForMem(Field->getType());
    if ((uint64_t)(Context.toBits(Layout.getAlignment())) <
        ResLTy->getPrimitiveSizeInBits())
    const unsigned OldOffset =
    const unsigned AbsoluteOffset =
    const unsigned StorageSize = ResLTy->getPrimitiveSizeInBits();
    if (Info.StorageSize == StorageSize && (OldOffset % StorageSize == 0))
    unsigned Offset = AbsoluteOffset & (StorageSize - 1);
    if (Offset + Info.Size > StorageSize)
      Offset = StorageSize - (Offset + Info.Size);
    if (End >= RecordSize)
    bool Conflict = false;
    for (const auto *F : D->fields()) {
      if (F->isBitField() && !F->isZeroLengthBitField())
      if (F->isZeroLengthBitField()) {
        if (End > FOffset && StorageOffset < FOffset) {
          Types.ConvertTypeForMem(F->getType())->getPrimitiveSizeInBits()) -
      if (End < FOffset || FEnd < StorageOffset)
void CGRecordLowering::accumulateVPtrs() {
        llvm::PointerType::getUnqual(Types.getLLVMContext())));
        llvm::PointerType::getUnqual(Types.getLLVMContext())));

CGRecordLowering::calculateTailClippingOffset(bool isNonVirtualBaseType) const {
  if (!isNonVirtualBaseType && isOverlappingVBaseABI())
    for (const auto &Base : RD->vbases()) {
      if (Context.isNearlyEmpty(BaseDecl) && !hasOwnStorage(RD, BaseDecl))
      ScissorOffset = std::min(ScissorOffset,
  return ScissorOffset;
void CGRecordLowering::accumulateVBases() {
  for (const auto &Base : RD->vbases()) {
    if (isOverlappingVBaseABI() &&
        !hasOwnStorage(RD, BaseDecl)) {
      Members.push_back(MemberInfo(Offset, MemberInfo::VBase, nullptr,
    Members.push_back(MemberInfo(Offset, MemberInfo::VBase,
                                 getStorageType(BaseDecl), BaseDecl));

  for (const auto &Base : Decl->bases())
    if (!hasOwnStorage(Base.getType()->getAsCXXRecordDecl(), Query))
void CGRecordLowering::calculateZeroInit() {
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
       IsZeroInitializableAsBase && Member != MemberEnd; ++Member) {
    if (Member->Kind == MemberInfo::Field) {
      IsZeroInitializable = IsZeroInitializableAsBase = false;
    } else if (Member->Kind == MemberInfo::Base ||
               Member->Kind == MemberInfo::VBase) {
      if (isZeroInitializable(Member->RD))
      IsZeroInitializable = false;
      if (Member->Kind == MemberInfo::Base)
        IsZeroInitializableAsBase = false;
void CGRecordLowering::checkBitfieldClipping(bool IsNonVirtualBaseType) const {
  auto ScissorOffset = calculateTailClippingOffset(IsNonVirtualBaseType);
  for (const auto &M : Members) {
    assert(M.Offset >= Tail && "Bitfield access unit is not clipped");
    Tail = M.Offset + getSize(M.Data);
    assert((Tail <= ScissorOffset || M.Offset >= ScissorOffset) &&
           "Bitfield straddles scissor offset");
void CGRecordLowering::determinePacked(bool NVBaseType) {
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
    if (Member->Offset < NVSize)
      NVAlignment = std::max(NVAlignment, getAlignment(Member->Data));
    Alignment = std::max(Alignment, getAlignment(Member->Data));
  if (Members.back().Offset % Alignment)
  if (NVSize % NVAlignment)
  Members.back().Data = getIntNType(Context.toBits(Alignment));
void CGRecordLowering::insertPadding() {
  std::vector<std::pair<CharUnits, CharUnits>> Padding;
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
    assert(Offset >= Size);
      Padding.push_back(std::make_pair(Size, Offset - Size));
  if (Padding.empty())
  for (std::vector<std::pair<CharUnits, CharUnits>>::const_iterator
           Pad = Padding.begin(), PadEnd = Padding.end();
       Pad != PadEnd; ++Pad)
    Members.push_back(StorageInfo(Pad->first, getByteArrayType(Pad->second)));
  llvm::stable_sort(Members);
void CGRecordLowering::fillOutputFields() {
  for (std::vector<MemberInfo>::const_iterator Member = Members.begin(),
                                               MemberEnd = Members.end();
    FieldTypes.push_back(Member->Data);
    if (Member->Kind == MemberInfo::Field) {
      Fields[Member->FD->getCanonicalDecl()] = FieldTypes.size() - 1;
      setBitFieldInfo(Member->FD, Member->Offset, FieldTypes.back());
    } else if (Member->Kind == MemberInfo::Base)
      NonVirtualBases[Member->RD] = FieldTypes.size() - 1;
    else if (Member->Kind == MemberInfo::VBase)
      VirtualBases[Member->RD] = FieldTypes.size() - 1;
                                    uint64_t Offset, uint64_t Size,
                                    uint64_t StorageSize,
  llvm::Type *Ty = Types.ConvertTypeForMem(FD->getType());
  uint64_t TypeSizeInBits = Types.getContext().toBits(TypeSizeInBytes);
  if (Size > TypeSizeInBits) {
    Size = TypeSizeInBits;
  if (Types.getDataLayout().isBigEndian()) {
std::unique_ptr<CGRecordLayout>
  CGRecordLowering Builder(*this, D, false);
  Builder.lower(false);
  llvm::StructType *BaseTy = nullptr;
  if (isa<CXXRecordDecl>(D)) {
    if (Builder.Layout.getNonVirtualSize() != Builder.Layout.getSize()) {
      CGRecordLowering BaseBuilder(*this, D, Builder.Packed);
      BaseBuilder.lower(true);
      BaseTy = llvm::StructType::create(
          getLLVMContext(), BaseBuilder.FieldTypes, "", BaseBuilder.Packed);
      assert(Builder.Packed == BaseBuilder.Packed &&
             "Non-virtual and complete types must agree on packedness");
  Ty->setBody(Builder.FieldTypes, Builder.Packed);
  auto RL = std::make_unique<CGRecordLayout>(
      Ty, BaseTy, (bool)Builder.IsZeroInitializable,
      (bool)Builder.IsZeroInitializableAsBase);
  RL->NonVirtualBases.swap(Builder.NonVirtualBases);
  RL->CompleteObjectVirtualBases.swap(Builder.VirtualBases);
  RL->FieldInfo.swap(Builder.Fields);
  RL->BitFields.swap(Builder.BitFields);
  if (getContext().getLangOpts().DumpRecordLayouts) {
    llvm::outs() << "\n*** Dumping IRgen Record Layout\n";
    llvm::outs() << "Record: ";
    D->dump(llvm::outs());
    llvm::outs() << "\nLayout: ";
    RL->print(llvm::outs());
  assert(TypeSizeInBits == getDataLayout().getTypeAllocSizeInBits(Ty) &&
         "Type size mismatch!");
  uint64_t AlignedNonVirtualTypeSizeInBits =
    assert(AlignedNonVirtualTypeSizeInBits ==
               getDataLayout().getTypeAllocSizeInBits(BaseTy) &&
           "Type size mismatch!");
  llvm::StructType *ST = RL->getLLVMType();
  const llvm::StructLayout *SL = getDataLayout().getStructLayout(ST);
  for (unsigned i = 0, e = AST_RL.getFieldCount(); i != e; ++i, ++it) {
      unsigned FieldNo = RL->getLLVMFieldNo(FD);
      assert(AST_RL.getFieldOffset(i) == SL->getElementOffsetInBits(FieldNo) &&
             "Invalid field offset!");
      llvm::Type *ElementTy = ST->getTypeAtIndex(RL->getLLVMFieldNo(FD));
      if (D->isUnion()) {
          assert(static_cast<unsigned>(Info.Offset + Info.Size) ==
                     Info.StorageSize &&
                 "Big endian union bitfield does not end at the back");
          assert(Info.Offset == 0 &&
                 "Little endian union bitfield with a non-zero offset");
        assert(Info.StorageSize <= SL->getSizeInBits() &&
               "Union not large enough for bitfield storage");
               "Storage size does not match the element type size");
      assert(Info.Size > 0 && "Empty bitfield!");
             "Bitfield outside of its allocated storage");
  OS << "<CGRecordLayout\n";
  OS << " LLVMType:" << *CompleteObjectType << "\n";
  if (BaseSubobjectType)
    OS << " NonVirtualBaseLLVMType:" << *BaseSubobjectType << "\n";
  OS << " IsZeroInitializable:" << IsZeroInitializable << "\n";
  OS << " BitFields:[\n";
  std::vector<std::pair<unsigned, const CGBitFieldInfo *>> BFIs;
  for (llvm::DenseMap<const FieldDecl *, CGBitFieldInfo>::const_iterator
           it = BitFields.begin(), ie = BitFields.end();
         it2 = RD->field_begin(); *it2 != it->first; ++it2)
    BFIs.push_back(std::make_pair(Index, &it->second));
  llvm::array_pod_sort(BFIs.begin(), BFIs.end());
  for (unsigned i = 0, e = BFIs.size(); i != e; ++i) {
    BFIs[i].second->print(OS);
  print(llvm::errs());
  OS << "<CGBitFieldInfo"
  print(llvm::errs());
Defines the clang::ASTContext interface.
static bool isAAPCS(const TargetInfo &TargetInfo)
Helper method to check if the underlying ABI is AAPCS.
Defines the C++ Decl subclasses, other than those for templates (found in DeclTemplate....
Holds long-lived AST nodes (such as types and decls) that can be referred to throughout the semantic ...
const ASTRecordLayout & getASTRecordLayout(const RecordDecl *D) const
Get or compute information about the layout of the specified record (struct/union/class) D,...
bool isNearlyEmpty(const CXXRecordDecl *RD) const
int64_t toBits(CharUnits CharSize) const
Convert a size in characters to a size in bits.
const TargetInfo & getTargetInfo() const
CharUnits toCharUnitsFromBits(int64_t BitSize) const
Convert a size in bits to a size in characters.
uint64_t getCharWidth() const
Return the size of the character type, in bits.
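Illustration (not part of the source above): a standalone sketch of the bit/character-unit conversions these helpers perform. The 8-bit char width is an assumption for the example; in clang it comes from ASTContext::getCharWidth() and is target-dependent.

#include <cassert>
#include <cstdint>

constexpr uint64_t CharWidthInBits = 8; // assumed; clang queries the target

// Mirrors ASTContext::toBits: characters to bits.
constexpr int64_t toBits(int64_t CharSize) { return CharSize * CharWidthInBits; }

// Mirrors ASTContext::toCharUnitsFromBits: bits to characters, rounding down,
// which is how bitsToCharUnits() in the lowering code maps a bit offset to the
// character that contains it.
constexpr int64_t toCharUnitsFromBits(int64_t BitSize) {
  return BitSize / CharWidthInBits;
}

int main() {
  assert(toBits(3) == 24);              // 3 chars -> 24 bits
  assert(toCharUnitsFromBits(20) == 2); // bit offset 20 lies in the third char
  return 0;
}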
ASTRecordLayout - This class contains layout information for one RecordDecl, which is a struct/union/...
bool hasOwnVFPtr() const
hasOwnVFPtr - Does this class provide its own virtual-function table pointer, rather than inheriting ...
bool hasOwnVBPtr() const
hasOwnVBPtr - Does this class provide its own virtual-base table pointer, rather than inheriting one ...
CharUnits getSize() const
getSize - Get the record size in characters.
unsigned getFieldCount() const
getFieldCount - Get the number of fields in the layout.
uint64_t getFieldOffset(unsigned FieldNo) const
getFieldOffset - Get the offset of the given field index, in bits.
CharUnits getVBPtrOffset() const
getVBPtrOffset - Get the offset for virtual base table pointer.
CharUnits getDataSize() const
getDataSize() - Get the record data size, which is the record size without tail padding,...
CharUnits getVBaseClassOffset(const CXXRecordDecl *VBase) const
getVBaseClassOffset - Get the offset, in chars, for the given base class.
const VBaseOffsetsMapTy & getVBaseOffsetsMap() const
const CXXRecordDecl * getPrimaryBase() const
getPrimaryBase - Get the primary base for this record.
bool isPrimaryBaseVirtual() const
isPrimaryBaseVirtual - Get whether the primary base for this record is virtual or not.
CharUnits getNonVirtualSize() const
getNonVirtualSize - Get the non-virtual size (in chars) of an object, which is the size of the object...
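Illustration (not part of the source above): why getSize(), getDataSize() and getNonVirtualSize() can disagree. The sketch assumes an Itanium C++ ABI target with 4-byte int; the concrete numbers are illustrative and are not produced by these accessors.

#include <cstdio>

struct B {
  int  a;   // bytes 0-3
  char b;   // byte 4
  ~B() {}   // non-trivial, so B's tail padding (bytes 5-7) may be reused
};

struct D : B {
  char c;   // Itanium ABI: placed at offset 5, inside B's tail padding
};

int main() {
  // sizeof(B) corresponds to the complete-object size (getSize() == 8 here),
  // while the data size without tail padding is 5; that difference is why a
  // derived class can stay at 8 bytes on this ABI.
  std::printf("sizeof(B)=%zu sizeof(D)=%zu\n", sizeof(B), sizeof(D));
  return 0;
}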
Represents a C++ struct/union/class.
CharUnits - This is an opaque type for sizes expressed in character units.
bool isZero() const
isZero - Test whether the quantity equals zero.
QuantityType getQuantity() const
getQuantity - Get the raw integer representation of this quantity.
static CharUnits One()
One - Construct a CharUnits quantity of one.
bool isMultipleOf(CharUnits N) const
Test whether this is a multiple of the other value.
static CharUnits fromQuantity(QuantityType Quantity)
fromQuantity - Construct a CharUnits quantity from a raw integer type.
static CharUnits Zero()
Zero - Construct a CharUnits quantity of zero.
void print(raw_ostream &OS) const
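Illustration (not part of the source above): minimal CharUnits usage restricted to the accessors documented here. It assumes the snippet is compiled against clang's headers (clang/AST/CharUnits.h); it is not part of CGRecordLayoutBuilder.cpp.

#include "clang/AST/CharUnits.h"
#include <cassert>

int main() {
  using clang::CharUnits;

  CharUnits Size = CharUnits::fromQuantity(5);  // e.g. a 5-char storage chunk
  CharUnits Align = CharUnits::fromQuantity(4); // e.g. a 4-char alignment

  assert(!Size.isZero());
  assert(!Size.isMultipleOf(Align)); // 5 is not a multiple of 4

  // Round Size up to the next multiple of Align, reasoning in whole character
  // units the same way appendPaddingBytes()/insertPadding() do above.
  CharUnits Padded = CharUnits::fromQuantity(
      (Size.getQuantity() + Align.getQuantity() - 1) / Align.getQuantity() *
      Align.getQuantity());
  assert(Padded.getQuantity() == 8);
  assert((Padded - Size) == CharUnits::fromQuantity(3)); // padding inserted
  return 0;
}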
This class organizes the cross-module state that is used while lowering AST types to LLVM types.
ASTContext & getContext() const
std::unique_ptr< CGRecordLayout > ComputeRecordLayout(const RecordDecl *D, llvm::StructType *Ty)
Compute a new LLVM record layout object for the given record.
llvm::LLVMContext & getLLVMContext()
const llvm::DataLayout & getDataLayout() const
void addRecordTypeName(const RecordDecl *RD, llvm::StructType *Ty, StringRef suffix)
addRecordTypeName - Compute a name from the given record decl with an optional suffix and name the gi...
specific_decl_iterator - Iterates over a subrange of declarations stored in a DeclContext,...
DeclContext * getParent()
getParent - Returns the containing DeclContext.
Decl - This represents one declaration (or definition), e.g.
Represents a member of a struct/union/class.
bool isBitField() const
Determines whether this field is a bitfield.
unsigned getBitWidthValue() const
Computes the bit width of this field, if this is a bit field.
unsigned getFieldIndex() const
Returns the index of this field within its record, as appropriate for passing to ASTRecordLayout::get...
FieldDecl * getCanonicalDecl() override
Retrieves the canonical declaration of this field.
DeclarationName getDeclName() const
Get the actual, stored name of the declaration, which may be a special name.
Represents a struct/union/class.
specific_decl_iterator< FieldDecl > field_iterator
field_iterator field_begin() const
bool isMicrosoft() const
Is this ABI an MSVC-compatible ABI?
virtual unsigned getRegisterWidth() const
Return the "preferred" register width on this target.
bool hasCheapUnalignedBitFieldAccess() const
Return true iff unaligned accesses are cheap.
TargetCXXABI getCXXABI() const
Get the C++ ABI currently in use.
virtual StringRef getABI() const
Get the ABI currently in use.
The base class of the type hierarchy.
bool isSignedIntegerOrEnumerationType() const
Determines whether this is an integer type that is signed or an enumeration types whose underlying ty...
bool isEmptyRecordForLayout(const ASTContext &Context, QualType T)
isEmptyRecordForLayout - Return true iff a structure contains only empty base classes (per isEmptyRec...
bool isEmptyFieldForLayout(const ASTContext &Context, const FieldDecl *FD)
isEmptyFieldForLayout - Return true iff the field is "empty", that is, either a zero-width bit-field ...
The JSON file list parser is used to communicate input to InstallAPI.
bool operator<(DeclarationName LHS, DeclarationName RHS)
Ordering on two declaration names.
Structure with information about how a bitfield should be accessed.
CharUnits StorageOffset
The offset of the bitfield storage from the start of the struct.
CharUnits VolatileStorageOffset
The offset of the bitfield storage from the start of the struct.
unsigned VolatileOffset
The offset within a contiguous run of bitfields that are represented as a single "field" within the L...
unsigned Offset
The offset within a contiguous run of bitfields that are represented as a single "field" within the L...
unsigned VolatileStorageSize
The storage size in bits which should be used when accessing this bitfield.
void print(raw_ostream &OS) const
unsigned Size
The total size of the bit-field, in bits.
unsigned StorageSize
The storage size in bits which should be used when accessing this bitfield.
unsigned IsSigned
Whether the bit-field is signed.
static CGBitFieldInfo MakeInfo(class CodeGenTypes &Types, const FieldDecl *FD, uint64_t Offset, uint64_t Size, uint64_t StorageSize, CharUnits StorageOffset)
Given a bit-field decl, build an appropriate helper object for accessing that field (which is expecte...
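Illustration (not part of the source above): a standalone sketch of how the Offset, Size, StorageSize and IsSigned fields describe a bit-field access. The struct mirrors a subset of CGBitFieldInfo; the layout values are hypothetical, not computed by MakeInfo, and the signed path relies on arithmetic right shift (guaranteed since C++20, universal in practice).

#include <cassert>
#include <cstdint>

struct BitFieldInfo {
  unsigned Offset;      // bit offset of the field inside its storage unit
  unsigned Size;        // width of the bit-field in bits
  unsigned StorageSize; // width of the loaded storage unit in bits
  unsigned IsSigned;    // whether to sign-extend the extracted value
};

// Extract the field from a loaded storage unit: shift the field's top bit up
// to bit 63, then shift back down, sign- or zero-filling as requested.
static int64_t extract(uint64_t Storage, const BitFieldInfo &Info) {
  unsigned HighBits = Info.StorageSize - (Info.Offset + Info.Size);
  unsigned Up = 64 - Info.StorageSize + HighBits;
  if (Info.IsSigned)
    return static_cast<int64_t>(Storage << Up) >> (64 - Info.Size);
  return (Storage << Up) >> (64 - Info.Size);
}

int main() {
  // Hypothetical layout: a 3-bit field at bit offset 4 of a 16-bit unit.
  BitFieldInfo Info{/*Offset=*/4, /*Size=*/3, /*StorageSize=*/16, /*IsSigned=*/1};
  uint64_t Storage = 0x0070;            // bits 4-6 set, so the field holds 0b111
  assert(extract(Storage, Info) == -1); // sign-extended
  Info.IsSigned = 0;
  assert(extract(Storage, Info) == 7);  // zero-extended
  return 0;
}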