return SM.isBeforeInTranslationUnit(T.location(), R.getBegin());

return !SM.isBeforeInTranslationUnit(R.getEnd(), T.location());

return {Begin, End};
assert(SM.getSLocEntry(TargetFile).isFile());

while (First.isMacroID() && Last.isMacroID()) {
  auto DecFirst = SM.getDecomposedLoc(First);
  auto DecLast = SM.getDecomposedLoc(Last);
  auto &ExpFirst = SM.getSLocEntry(DecFirst.first).getExpansion();
  auto &ExpLast = SM.getSLocEntry(DecLast.first).getExpansion();

  if (!ExpFirst.isMacroArgExpansion() || !ExpLast.isMacroArgExpansion())

  if (ExpFirst.getExpansionLocStart() != ExpLast.getExpansionLocStart())

  auto ExpFileID = SM.getFileID(ExpFirst.getExpansionLocStart());
  if (ExpFileID == TargetFile)

  First = ExpFirst.getSpellingLoc().getLocWithOffset(DecFirst.second);
  Last = ExpLast.getSpellingLoc().getLocWithOffset(DecLast.second);
auto DecFirst = SM.getDecomposedExpansionLoc(Candidate.getBegin());
auto DecLast = SM.getDecomposedExpansionLoc(Candidate.getEnd());

if (Candidate.isInvalid() || DecFirst.first != TargetFile ||
    DecLast.first != TargetFile)

auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Prev).getBegin());
if (Dec.first != DecFirst.first || Dec.second >= DecFirst.second)

if (Next.isValid()) {
  auto Dec = SM.getDecomposedLoc(SM.getExpansionRange(Next).getEnd());
  if (Dec.first != DecLast.first || Dec.second <= DecLast.second)
: Location(Location), Length(Length), Kind(Kind) {

assert(!T.isAnnotation());

const char *Start = SM.getCharacterData(location(), &Invalid);

return llvm::StringRef(Start, length());

assert(location().isFileID() && "must be a spelled token");

unsigned StartOffset;
std::tie(File, StartOffset) = SM.getDecomposedLoc(location());
auto L = Last.range(SM);
assert(F.file() == L.file() && "tokens from different files");
assert((F == L || F.endOffset() <= L.beginOffset()) && "wrong order of tokens");
return FileRange(F.file(), F.beginOffset(), L.endOffset());

return OS << T.str();
assert(File.isValid());
assert(BeginOffset <= EndOffset);

std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
End = Begin + Length;

assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));
assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));

std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
End = SM.getFileOffset(EndLoc);

return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",

assert(End <= Text.size());
if (!ExpandedTokIndex.empty())

ExpandedTokIndex.reserve(ExpandedTokens.size());

for (size_t I = 0, E = ExpandedTokens.size(); I != E; ++I) {

  ExpandedTokIndex[Loc] = I;

if (!ExpandedTokIndex.empty()) {

  const auto B = ExpandedTokIndex.find(R.getBegin());
  const auto E = ExpandedTokIndex.find(R.getEnd());
  if (B != ExpandedTokIndex.end() && E != ExpandedTokIndex.end()) {
    const Token *L = ExpandedTokens.data() + B->getSecond();

    const Token *R = ExpandedTokens.data() + E->getSecond() + 1;
std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {

  assert(ExpandedTokens.data() <= Expanded &&
         Expanded < ExpandedTokens.data() + ExpandedTokens.size());

  auto FileIt = Files.find(

  assert(FileIt != Files.end() && "no file for an expanded token");

  const MarkedFile &File = FileIt->second;

  unsigned ExpandedIndex = Expanded - ExpandedTokens.data();

  auto It = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginExpanded <= ExpandedIndex;

  if (It == File.Mappings.begin()) {

    return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded],

  if (ExpandedIndex < It->EndExpanded)
    return {&File.SpelledTokens[It->BeginSpelled], &*It};

    &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],
const TokenBuffer::Mapping *
TokenBuffer::mappingStartingBeforeSpelled(const MarkedFile &F,

  assert(F.SpelledTokens.data() <= Spelled);
  unsigned SpelledI = Spelled - F.SpelledTokens.data();
  assert(SpelledI < F.SpelledTokens.size());

  auto It = llvm::partition_point(F.Mappings, [SpelledI](const Mapping &M) {
    return M.BeginSpelled <= SpelledI;

  if (It == F.Mappings.begin())
const auto &File = fileForSpelled(Spelled);

auto *FrontMapping = mappingStartingBeforeSpelled(File, &Spelled.front());
unsigned SpelledFrontI = &Spelled.front() - File.SpelledTokens.data();
assert(SpelledFrontI < File.SpelledTokens.size());
unsigned ExpandedBegin;

  ExpandedBegin = File.BeginExpanded + SpelledFrontI;
} else if (SpelledFrontI < FrontMapping->EndSpelled) {

  if (SpelledFrontI != FrontMapping->BeginSpelled) {

  ExpandedBegin = FrontMapping->BeginExpanded;

  FrontMapping->EndExpanded + (SpelledFrontI - FrontMapping->EndSpelled);

auto *BackMapping = mappingStartingBeforeSpelled(File, &Spelled.back());
unsigned SpelledBackI = &Spelled.back() - File.SpelledTokens.data();
unsigned ExpandedEnd;

  ExpandedEnd = File.BeginExpanded + SpelledBackI + 1;
} else if (SpelledBackI < BackMapping->EndSpelled) {

  if (SpelledBackI + 1 != BackMapping->EndSpelled) {

  ExpandedEnd = BackMapping->EndExpanded;

  BackMapping->EndExpanded + (SpelledBackI - BackMapping->EndSpelled) + 1;

assert(ExpandedBegin < ExpandedTokens.size());
assert(ExpandedEnd < ExpandedTokens.size());

if (ExpandedBegin == ExpandedEnd)

  ExpandedTokens.data() + ExpandedEnd)};
auto It = Files.find(FID);
assert(It != Files.end());
return It->second.SpelledTokens;

const auto *Tok = llvm::partition_point(

    [&](const syntax::Token &Tok) { return Tok.endLocation() <= Loc; });
if (!Tok || Loc < Tok->location())

std::string TokenBuffer::Mapping::str() const {

  llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",
                BeginSpelled, EndSpelled, BeginExpanded, EndExpanded));
std::optional<llvm::ArrayRef<syntax::Token>>

if (!Expanded.empty() && Expanded.back().kind() == tok::eof) {
  Expanded = Expanded.drop_back();

if (Expanded.empty())

auto [FirstSpelled, FirstMapping] = spelledForExpandedToken(First);
auto [LastSpelled, LastMapping] = spelledForExpandedToken(Last);

if (FID != SourceMgr->getFileID(LastSpelled->location()))

const MarkedFile &File = Files.find(FID)->second;

if (FirstMapping && FirstMapping == LastMapping &&

    : (First - 1)->location();

    : (Last + 1)->location();

    First->location(), Last->location(), Prev, Next, FID, *SourceMgr);

return getTokensCovering(File.SpelledTokens, Range, *SourceMgr);

unsigned FirstExpanded = Expanded.begin() - ExpandedTokens.data();
unsigned LastExpanded = Expanded.end() - ExpandedTokens.data();
if (FirstMapping && FirstExpanded != FirstMapping->BeginExpanded)

if (LastMapping && LastMapping->EndExpanded != LastExpanded)

  FirstMapping ? File.SpelledTokens.data() + FirstMapping->BeginSpelled

  LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled

const Mapping &M) const {

  F.SpelledTokens.data() + M.EndSpelled);

  ExpandedTokens.data() + M.EndExpanded);
const TokenBuffer::MarkedFile &

assert(!Spelled.empty());
assert(Spelled.front().location().isFileID() && "not a spelled token");
auto FileIt = Files.find(SourceMgr->getFileID(Spelled.front().location()));
assert(FileIt != Files.end() && "file not tracked by token buffer");
const auto &File = FileIt->second;
assert(File.SpelledTokens.data() <= Spelled.data() &&

       (File.SpelledTokens.data() + File.SpelledTokens.size()) &&
       "Tokens not in spelled range");

auto T1 = Spelled.back().location();
auto T2 = File.SpelledTokens.back().location();
assert(T1 == T2 || sourceManager().isBeforeInTranslationUnit(T1, T2));
std::optional<TokenBuffer::Expansion>

const auto &File = fileForSpelled(*Spelled);

unsigned SpelledIndex = Spelled - File.SpelledTokens.data();
auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
  return M.BeginSpelled < SpelledIndex;

if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)

return makeExpansion(File, *M);

const auto &File = fileForSpelled(Spelled);

unsigned SpelledBeginIndex = Spelled.begin() - File.SpelledTokens.data();
unsigned SpelledEndIndex = Spelled.end() - File.SpelledTokens.data();
auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
  return M.EndSpelled <= SpelledBeginIndex;

std::vector<TokenBuffer::Expansion> Expansions;
for (; M != File.Mappings.end() && M->BeginSpelled < SpelledEndIndex; ++M)
  Expansions.push_back(makeExpansion(File, *M));
auto *Right = llvm::partition_point(

bool AcceptRight = Right != Tokens.end() && Right->location() <= Loc;

    Right != Tokens.begin() && (Right - 1)->endLocation() >= Loc;

    Right + (AcceptRight ? 1 : 0));

    Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));

if (Tok.kind() == tok::identifier)

    Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
std::vector<const syntax::Token *>

auto FileIt = Files.find(FID);
assert(FileIt != Files.end() && "file not tracked by token buffer");
auto &File = FileIt->second;
std::vector<const syntax::Token *> Expansions;
auto &Spelled = File.SpelledTokens;
for (auto Mapping : File.Mappings) {

  if (Token->kind() == tok::TokenKind::identifier)
    Expansions.push_back(Token);
std::vector<syntax::Token> Tokens;

if (T.getKind() == tok::raw_identifier && !T.needsCleaning() &&

  T.setIdentifierInfo(&II);

auto SrcBuffer = SM.getBufferData(FR.file());
Lexer L(SM.getLocForStartOfFile(FR.file()), LO, SrcBuffer.data(),

        SrcBuffer.data() + SrcBuffer.size());

if (SM.getFileOffset(T.getLocation()) < FR.endOffset())
const auto &SM = Collector->PP.getSourceManager();

if (LastExpansionEnd.isValid() &&
    !SM.isBeforeInTranslationUnit(LastExpansionEnd, Range.getEnd()))

    "Overlapping macros should have same expansion location");

if (T.isAnnotation())

DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()

auto CB = std::make_unique<CollectPPExpansions>(*this);
this->Collector = CB.get();
Builder(std::vector<syntax::Token> Expanded, PPExpansions CollectedExpansions,

    : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),

  Result.ExpandedTokens = std::move(Expanded);

assert(!Result.ExpandedTokens.empty());
assert(Result.ExpandedTokens.back().kind() == tok::eof);

buildSpelledTokens();

while (NextExpanded < Result.ExpandedTokens.size() - 1) {

  unsigned OldPosition = NextExpanded;

  if (NextExpanded == OldPosition)
    diagnoseAdvanceFailure();

for (const auto &File : Result.Files)
  discard(File.first);

for (auto &pair : Result.Files) {
  auto &mappings = pair.second.Mappings;
  assert(llvm::is_sorted(mappings, [](const TokenBuffer::Mapping &M1,
                                      const TokenBuffer::Mapping &M2) {
    return M1.BeginSpelled < M2.BeginSpelled &&
           M1.EndSpelled < M2.EndSpelled &&
           M1.BeginExpanded < M2.BeginExpanded &&
           M1.EndExpanded < M2.EndExpanded;

return std::move(Result);
void discard(std::optional<FileID> Drain = std::nullopt) {

      Drain ? SM.getLocForEndOfFile(*Drain)
            : SM.getExpansionLoc(
                  Result.ExpandedTokens[NextExpanded].location());

  const auto &SpelledTokens = Result.Files[File].SpelledTokens;
  auto &NextSpelled = this->NextSpelled[File];

  TokenBuffer::Mapping Mapping;
  Mapping.BeginSpelled = NextSpelled;

  Mapping.BeginExpanded = Mapping.EndExpanded =
      Drain ? Result.Files[*Drain].EndExpanded : NextExpanded;

  auto FlushMapping = [&, this] {
    Mapping.EndSpelled = NextSpelled;
    if (Mapping.BeginSpelled != Mapping.EndSpelled)
      Result.Files[File].Mappings.push_back(Mapping);
    Mapping.BeginSpelled = NextSpelled;

  while (NextSpelled < SpelledTokens.size() &&
         SpelledTokens[NextSpelled].location() < Target) {

    CollectedExpansions.lookup(SpelledTokens[NextSpelled].location());

    while (NextSpelled < SpelledTokens.size() &&
           SpelledTokens[NextSpelled].location() <= KnownEnd)
const syntax::Token &Tok = Result.ExpandedTokens[NextExpanded];

const auto &SpelledTokens = Result.Files[File].SpelledTokens;
auto &NextSpelled = this->NextSpelled[File];

while (NextSpelled < SpelledTokens.size() &&
       NextExpanded < Result.ExpandedTokens.size() &&
       SpelledTokens[NextSpelled].location() ==
           Result.ExpandedTokens[NextExpanded].location()) {

auto End = CollectedExpansions.lookup(Expansion);
assert(End.isValid() && "Macro expansion wasn't captured?");

TokenBuffer::Mapping Mapping;
Mapping.BeginExpanded = NextExpanded;
Mapping.BeginSpelled = NextSpelled;

while (NextSpelled < SpelledTokens.size() &&
       SpelledTokens[NextSpelled].location() <= End)

while (NextExpanded < Result.ExpandedTokens.size() &&

           Result.ExpandedTokens[NextExpanded].location()) == Expansion)

Mapping.EndExpanded = NextExpanded;
Mapping.EndSpelled = NextSpelled;
Result.Files[File].Mappings.push_back(Mapping);
void diagnoseAdvanceFailure() {

  for (unsigned I = (NextExpanded < 10) ? 0 : NextExpanded - 10;
       I < NextExpanded + 5 && I < Result.ExpandedTokens.size(); ++I) {

    (I == NextExpanded) ? "!! " : (I < NextExpanded) ? "ok " : " ";
    llvm::errs() << L << Result.ExpandedTokens[I].dumpForTests(SM) << "\n";

  llvm_unreachable("Couldn't map expanded token to spelled tokens!");
void buildSpelledTokens() {
  for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {
    const auto &Tok = Result.ExpandedTokens[I];
    auto FID = SM.getFileID(SM.getExpansionLoc(Tok.location()));
    auto It = Result.Files.try_emplace(FID);
    TokenBuffer::MarkedFile &File = It.first->second;

    File.EndExpanded = Tok.kind() == tok::eof ? I : I + 1;

    File.BeginExpanded = I;

unsigned NextExpanded = 0;
llvm::DenseMap<FileID, unsigned> NextSpelled;
PPExpansions CollectedExpansions;
PP.setTokenWatcher(nullptr);
Collector->disable();
return Builder(std::move(Expanded), std::move(Expansions),
               PP.getSourceManager(), PP.getLangOpts())

return std::string(llvm::formatv("Token({0}, length = {1})",

return std::string(llvm::formatv("Token(`{0}`, {1}, length = {2})", text(SM),
auto PrintToken = [this](const syntax::Token &T) -> std::string {
  if (T.kind() == tok::eof)

  return std::string(T.text(*SourceMgr));

auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,

  if (Tokens.empty()) {

  OS << Tokens[0].text(*SourceMgr);
  for (unsigned I = 1; I < Tokens.size(); ++I) {
    if (Tokens[I].kind() == tok::eof)

    OS << " " << PrintToken(Tokens[I]);

llvm::raw_string_ostream OS(Dump);

OS << "expanded tokens:\n"

std::vector<FileID> Keys;
for (const auto &F : Files)
  Keys.push_back(F.first);

const MarkedFile &File = Files.find(ID)->second;

std::string Path = llvm::sys::path::convert_to_slash(Entry->getName());
OS << llvm::formatv("file '{0}'\n", Path) << " spelled tokens:\n"

DumpTokens(OS, File.SpelledTokens);

if (File.Mappings.empty()) {
  OS << " no mappings.\n";

OS << " mappings:\n";
for (auto &M : File.Mappings) {

    " ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",
    PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,
    M.EndSpelled == File.SpelledTokens.size()

        : PrintToken(File.SpelledTokens[M.EndSpelled]),
    M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),
    M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),
Defines the Diagnostic-related interfaces.
Defines the clang::IdentifierInfo, clang::IdentifierTable, and clang::Selector interfaces.
Forward-declares and imports various common LLVM datatypes that clang wants to use unqualified.
Defines the clang::LangOptions interface.
Defines the PPCallbacks interface.
Defines the clang::Preprocessor interface.
Defines the clang::SourceLocation class and associated facilities.
Defines the SourceManager interface.
Defines the clang::TokenKind enum and support functions.
Builds mappings and spelled tokens in the TokenBuffer based on the expanded token stream.
Builder(std::vector< syntax::Token > Expanded, PPExpansions CollectedExpansions, const SourceManager &SM, const LangOptions &LangOpts)
Records information required to construct mappings for the token buffer that we are collecting.
CollectPPExpansions(TokenCollector &C)
void disable()
A disabled instance stops reporting anything to the TokenCollector.
void MacroExpands(const clang::Token &MacroNameTok, const MacroDefinition &MD, SourceRange Range, const MacroArgs *Args) override
Called by Preprocessor::HandleMacroExpandedIdentifier when a macro invocation is found.
Represents a character-granular source range.
An opaque identifier used by SourceManager which refers to a source file (MemoryBuffer) along with it...
unsigned getHashValue() const
One of these records is kept for each identifier that is lexed.
tok::TokenKind getTokenID() const
If this is a source-language token (e.g.
Implements an efficient mapping from strings to IdentifierInfo nodes.
IdentifierInfo & get(StringRef Name)
Return the identifier token info for the specified named identifier.
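A minimal sketch (not from this file) of how raw identifier text is mapped to its final token kind through the identifier table, the same step tokenize() performs above; the helper name is illustrative, the APIs are the ones documented here:

#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/LangOptions.h"

// Illustrative helper: map raw identifier text to its token kind.
clang::tok::TokenKind kindForRawIdentifier(llvm::StringRef Text,
                                           const clang::LangOptions &LO) {
  clang::IdentifierTable Identifiers(LO); // populated with keywords for LO
  clang::IdentifierInfo &II = Identifiers.get(Text);
  return II.getTokenID();                 // e.g. tok::kw_int for "int"
}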
Keeps track of the various options that can be enabled, which controls the dialect of C or C++ that i...
Lexer - This provides a simple interface that turns a text buffer into a stream of tokens.
bool LexFromRawLexer(Token &Result)
LexFromRawLexer - Lex a token from a designated raw lexer (one with no associated preprocessor object...
unsigned getCurrentBufferOffset()
Returns the current lexing offset.
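As a rough usage sketch of raw-mode lexing in the spirit of syntax::tokenize (the helper name is made up; SM, LO and FID are assumed to come from an already-configured compiler invocation):

#include "clang/Basic/SourceManager.h"
#include "clang/Lex/Lexer.h"

// Hypothetical helper: count the raw tokens of a file.
unsigned countRawTokens(clang::FileID FID, const clang::SourceManager &SM,
                        const clang::LangOptions &LO) {
  llvm::StringRef Buf = SM.getBufferData(FID);
  clang::Lexer Lex(SM.getLocForStartOfFile(FID), LO, Buf.begin(), Buf.begin(),
                   Buf.end());
  clang::Token Tok;
  unsigned Count = 0;
  while (!Lex.LexFromRawLexer(Tok)) // returns true once tok::eof is produced
    ++Count;
  return Count;
}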
MacroArgs - An instance of this class captures information about the formal arguments specified to a ...
A description of the current definition of a macro.
This interface provides a way to observe the actions of the preprocessor as it does its thing.
Engages in a tight little dance with the lexer to efficiently preprocess tokens.
void addPPCallbacks(std::unique_ptr< PPCallbacks > C)
SourceManager & getSourceManager() const
void setTokenWatcher(llvm::unique_function< void(const clang::Token &)> F)
Register a function that would be called on each token in the final expanded token stream.
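A sketch of the two hooks TokenCollector relies on, assuming PP is a fully configured Preprocessor; the callback class and the logging are illustrative only:

#include "clang/Lex/MacroArgs.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/Preprocessor.h"
#include "llvm/Support/raw_ostream.h"
#include <memory>

void installTokenLogging(clang::Preprocessor &PP) {
  // Observe every token of the final expanded stream.
  PP.setTokenWatcher([](const clang::Token &T) {
    llvm::errs() << clang::tok::getTokenName(T.getKind()) << "\n";
  });
  // Observe preprocessor events such as macro expansions.
  class LogExpansions : public clang::PPCallbacks {
    void MacroExpands(const clang::Token &MacroNameTok,
                      const clang::MacroDefinition &MD,
                      clang::SourceRange Range,
                      const clang::MacroArgs *Args) override {
      llvm::errs() << "macro expansion\n";
    }
  };
  PP.addPPCallbacks(std::make_unique<LogExpansions>());
}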
Encodes a location in the source.
bool isValid() const
Return true if this is a valid SourceLocation object.
This class handles loading and caching of source files into memory.
FileID getFileID(SourceLocation SpellingLoc) const
Return the FileID for a SourceLocation.
OptionalFileEntryRef getFileEntryRefForID(FileID FID) const
Returns the FileEntryRef for the provided FileID.
bool isMacroArgExpansion(SourceLocation Loc, SourceLocation *StartLoc=nullptr) const
Tests whether the given source location represents a macro argument's expansion into the function-lik...
SourceLocation getExpansionLoc(SourceLocation Loc) const
Given a SourceLocation object Loc, return the expansion location referenced by the ID.
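For orientation, a small sketch of the SourceManager queries this file leans on most (Loc is assumed to be any location taken from the expanded token stream):

#include "clang/Basic/SourceManager.h"
#include "llvm/Support/raw_ostream.h"

// Illustrative only: map a (possibly macro) location back to its file name.
void printOwningFile(const clang::SourceManager &SM, clang::SourceLocation Loc) {
  clang::FileID FID = SM.getFileID(SM.getExpansionLoc(Loc));
  if (clang::OptionalFileEntryRef FE = SM.getFileEntryRefForID(FID))
    llvm::errs() << FE->getName() << "\n";
}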
A trivial tuple used to represent a source range.
void setBegin(SourceLocation b)
SourceLocation getEnd() const
SourceLocation getBegin() const
Token - This structure provides full information about a lexed token.
A list of tokens obtained by preprocessing a text buffer and operations to map between the expanded a...
const syntax::Token * spelledTokenContaining(SourceLocation Loc) const
Returns the spelled Token containing Loc; returns nullptr if there is no such token.
const SourceManager & sourceManager() const
void indexExpandedTokens()
Builds a cache to make future calls to expandedToken(SourceRange) faster.
llvm::SmallVector< llvm::ArrayRef< syntax::Token >, 1 > expandedForSpelled(llvm::ArrayRef< syntax::Token > Spelled) const
Find the subranges of expanded tokens, corresponding to Spelled.
llvm::ArrayRef< syntax::Token > expandedTokens() const
All tokens produced by the preprocessor after all macro replacements, directives, etc.
std::string dumpForTests() const
std::optional< llvm::ArrayRef< syntax::Token > > spelledForExpanded(llvm::ArrayRef< syntax::Token > Expanded) const
Returns the subrange of spelled tokens corresponding to AST node spanning Expanded.
std::vector< Expansion > expansionsOverlapping(llvm::ArrayRef< syntax::Token > Spelled) const
Returns all expansions (partially) expanded from the specified tokens.
std::optional< Expansion > expansionStartingAt(const syntax::Token *Spelled) const
If Spelled starts a mapping (e.g.
llvm::ArrayRef< syntax::Token > spelledTokens(FileID FID) const
Lexed tokens of a file before preprocessing.
std::vector< const syntax::Token * > macroExpansions(FileID FID) const
Get all tokens that expand a macro in FID.
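Tying these pieces together, a hedged sketch of the typical round-trip; Buffer is assumed to come from a TokenCollector run over an already-parsed file and Range from an AST node:

#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void printSpelledForNode(const clang::syntax::TokenBuffer &Buffer,
                         clang::SourceRange Range) {
  const clang::SourceManager &SM = Buffer.sourceManager();
  // Tokens of the final expanded stream covered by the AST node.
  llvm::ArrayRef<clang::syntax::Token> Expanded = Buffer.expandedTokens(Range);
  // Map them back to the characters spelled in the file, if that is possible.
  if (auto Spelled = Buffer.spelledForExpanded(Expanded))
    for (const clang::syntax::Token &T : *Spelled)
      llvm::errs() << T.text(SM) << " ";
  llvm::errs() << "\n";
}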
Collects tokens for the main file while running the frontend action.
TokenBuffer consume() &&
Finalizes token collection.
TokenCollector(Preprocessor &P)
Adds the hooks to collect the tokens.
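A sketch of the intended collection flow, assuming CI is a CompilerInstance that already has a Preprocessor and Sema set up; error handling is omitted:

#include "clang/Frontend/CompilerInstance.h"
#include "clang/Parse/ParseAST.h"
#include "clang/Tooling/Syntax/Tokens.h"

clang::syntax::TokenBuffer collectTokens(clang::CompilerInstance &CI) {
  // Install the preprocessor hooks before any tokens are lexed.
  clang::syntax::TokenCollector Collector(CI.getPreprocessor());
  // Run the parse; the collector records spelled and expanded tokens as it goes.
  clang::ParseAST(CI.getSema());
  // Finalize the mappings and hand back the TokenBuffer.
  return std::move(Collector).consume();
}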
A token coming directly from a file or from a macro invocation.
std::string str() const
For debugging purposes.
llvm::StringRef text(const SourceManager &SM) const
Get the substring covered by the token.
tok::TokenKind kind() const
FileRange range(const SourceManager &SM) const
Gets a range of this token.
Token(SourceLocation Location, unsigned Length, tok::TokenKind Kind)
std::string dumpForTests(const SourceManager &SM) const
SourceLocation location() const
Location of the first character of a token.
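A small illustrative fragment of the per-token accessors; T is assumed to be a spelled token taken from a TokenBuffer:

#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

// Illustrative only: inspect one spelled token.
void dumpOneToken(const clang::syntax::Token &T, const clang::SourceManager &SM) {
  llvm::StringRef Spelling = T.text(SM);    // characters the token covers
  clang::syntax::FileRange R = T.range(SM); // file + [begin, end) offsets
  llvm::errs() << T.str() << " '" << Spelling << "' at [" << R.beginOffset()
               << ", " << R.endOffset() << ")\n";
}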
const syntax::Token * spelledIdentifierTouching(SourceLocation Loc, llvm::ArrayRef< syntax::Token > Tokens)
The identifier token that overlaps or touches a spelling location Loc.
std::vector< syntax::Token > tokenize(FileID FID, const SourceManager &SM, const LangOptions &LO)
Lex the text buffer, corresponding to FID, in raw mode and record the resulting spelled tokens.
raw_ostream & operator<<(raw_ostream &OS, NodeKind K)
For debugging purposes.
llvm::ArrayRef< syntax::Token > spelledTokensTouching(SourceLocation Loc, const syntax::TokenBuffer &Tokens)
The spelled tokens that overlap or touch a spelling location Loc.
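Usage sketch for the cursor-oriented helpers, with Buffer and Loc assumed to come from a parsed file:

#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void describeLocation(const clang::syntax::TokenBuffer &Buffer,
                      clang::SourceLocation Loc) {
  // All spelled tokens that overlap or touch Loc.
  for (const clang::syntax::Token &T :
       clang::syntax::spelledTokensTouching(Loc, Buffer))
    llvm::errs() << T.str() << "\n";
  // The identifier under the cursor, if any.
  if (const clang::syntax::Token *Id =
          clang::syntax::spelledIdentifierTouching(Loc, Buffer))
    llvm::errs() << "identifier: " << Id->text(Buffer.sourceManager()) << "\n";
}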
const char * getTokenName(TokenKind Kind) LLVM_READNONE
Determines the name of a token as used within the front end.
TokenKind
Provides a simple uniform namespace for tokens from all C languages.
A half-open character range inside a particular file, the start offset is included and the end offset...
CharSourceRange toCharRange(const SourceManager &SM) const
Convert to the clang range.
FileRange(FileID File, unsigned BeginOffset, unsigned EndOffset)
EXPECTS: File.isValid() && Begin <= End.
unsigned beginOffset() const
Start is a start offset (inclusive) in the corresponding file.
llvm::StringRef text(const SourceManager &SM) const
Gets the substring that this FileRange refers to.
unsigned endOffset() const
End offset (exclusive) in the corresponding file.
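And a short sketch of how a FileRange is typically consumed (Tok and SM as above; the function name is illustrative):

#include "clang/Tooling/Syntax/Tokens.h"

// Illustrative only: read the source text behind a token and convert its
// range for use with existing Clang APIs that expect CharSourceRange.
void useRange(const clang::syntax::Token &Tok, const clang::SourceManager &SM) {
  clang::syntax::FileRange R = Tok.range(SM);
  llvm::StringRef Text = R.text(SM);             // substring of the file buffer
  clang::CharSourceRange CR = R.toCharRange(SM); // clang-style character range
  (void)Text;
  (void)CR;
}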
An expansion produced by the preprocessor, including macro expansions and preprocessor directives.
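Finally, a hedged sketch of walking the expansions recorded for a file; Buffer and FID are assumed as before:

#include "clang/Tooling/Syntax/Tokens.h"
#include "llvm/Support/raw_ostream.h"

void listExpansions(const clang::syntax::TokenBuffer &Buffer, clang::FileID FID) {
  const clang::SourceManager &SM = Buffer.sourceManager();
  for (const clang::syntax::Token &T : Buffer.spelledTokens(FID))
    if (auto E = Buffer.expansionStartingAt(&T))
      // E->Spelled covers the macro invocation, E->Expanded its replacement.
      llvm::errs() << "expansion at " << E->Spelled.front().text(SM) << "\n";
}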