comparison clang/lib/Tooling/Syntax/Tokens.cpp @ 150:1d019706d866
LLVM10
author:   anatofuz
date:     Thu, 13 Feb 2020 15:10:13 +0900
parents:
children: 0572611fdcc8
comparison: 147:c2174574ed3a vs 150:1d019706d866
//===- Tokens.cpp - collect tokens from preprocessing ---------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#include "clang/Tooling/Syntax/Tokens.h"

#include "clang/Basic/Diagnostic.h"
#include "clang/Basic/IdentifierTable.h"
#include "clang/Basic/LLVM.h"
#include "clang/Basic/LangOptions.h"
#include "clang/Basic/SourceLocation.h"
#include "clang/Basic/SourceManager.h"
#include "clang/Basic/TokenKinds.h"
#include "clang/Lex/PPCallbacks.h"
#include "clang/Lex/Preprocessor.h"
#include "clang/Lex/Token.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/None.h"
#include "llvm/ADT/Optional.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/FormatVariadic.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
#include <cassert>
#include <iterator>
#include <string>
#include <utility>
#include <vector>

using namespace clang;
using namespace clang::syntax;

syntax::Token::Token(SourceLocation Location, unsigned Length,
                     tok::TokenKind Kind)
    : Location(Location), Length(Length), Kind(Kind) {
  assert(Location.isValid());
}

syntax::Token::Token(const clang::Token &T)
    : Token(T.getLocation(), T.getLength(), T.getKind()) {
  assert(!T.isAnnotation());
}

llvm::StringRef syntax::Token::text(const SourceManager &SM) const {
  bool Invalid = false;
  const char *Start = SM.getCharacterData(location(), &Invalid);
  assert(!Invalid);
  return llvm::StringRef(Start, length());
}

FileRange syntax::Token::range(const SourceManager &SM) const {
  assert(location().isFileID() && "must be a spelled token");
  FileID File;
  unsigned StartOffset;
  std::tie(File, StartOffset) = SM.getDecomposedLoc(location());
  return FileRange(File, StartOffset, StartOffset + length());
}

FileRange syntax::Token::range(const SourceManager &SM,
                               const syntax::Token &First,
                               const syntax::Token &Last) {
  auto F = First.range(SM);
  auto L = Last.range(SM);
  assert(F.file() == L.file() && "tokens from different files");
  assert((F == L || F.endOffset() <= L.beginOffset()) &&
         "wrong order of tokens");
  return FileRange(F.file(), F.beginOffset(), L.endOffset());
}
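
// Example (illustrative sketch, not from the upstream file): given a
// SourceManager `SM` and a non-empty ArrayRef<syntax::Token> `Toks` of
// spelled tokens from a single file, the range covering all of them is
//   FileRange R = syntax::Token::range(SM, Toks.front(), Toks.back());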

llvm::raw_ostream &syntax::operator<<(llvm::raw_ostream &OS, const Token &T) {
  return OS << T.str();
}

FileRange::FileRange(FileID File, unsigned BeginOffset, unsigned EndOffset)
    : File(File), Begin(BeginOffset), End(EndOffset) {
  assert(File.isValid());
  assert(BeginOffset <= EndOffset);
}

FileRange::FileRange(const SourceManager &SM, SourceLocation BeginLoc,
                     unsigned Length) {
  assert(BeginLoc.isValid());
  assert(BeginLoc.isFileID());

  std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
  End = Begin + Length;
}

FileRange::FileRange(const SourceManager &SM, SourceLocation BeginLoc,
                     SourceLocation EndLoc) {
  assert(BeginLoc.isValid());
  assert(BeginLoc.isFileID());
  assert(EndLoc.isValid());
  assert(EndLoc.isFileID());
  assert(SM.getFileID(BeginLoc) == SM.getFileID(EndLoc));
  assert(SM.getFileOffset(BeginLoc) <= SM.getFileOffset(EndLoc));

  std::tie(File, Begin) = SM.getDecomposedLoc(BeginLoc);
  End = SM.getFileOffset(EndLoc);
}

llvm::raw_ostream &syntax::operator<<(llvm::raw_ostream &OS,
                                      const FileRange &R) {
  return OS << llvm::formatv("FileRange(file = {0}, offsets = {1}-{2})",
                             R.file().getHashValue(), R.beginOffset(),
                             R.endOffset());
}

llvm::StringRef FileRange::text(const SourceManager &SM) const {
  bool Invalid = false;
  StringRef Text = SM.getBufferData(File, &Invalid);
  if (Invalid)
    return "";
  assert(Begin <= Text.size());
  assert(End <= Text.size());
  return Text.substr(Begin, length());
}

llvm::ArrayRef<syntax::Token> TokenBuffer::expandedTokens(SourceRange R) const {
  if (R.isInvalid())
    return {};
  const Token *Begin =
      llvm::partition_point(expandedTokens(), [&](const syntax::Token &T) {
        return SourceMgr->isBeforeInTranslationUnit(T.location(), R.getBegin());
      });
  const Token *End =
      llvm::partition_point(expandedTokens(), [&](const syntax::Token &T) {
        return !SourceMgr->isBeforeInTranslationUnit(R.getEnd(), T.location());
      });
  if (Begin > End)
    return {};
  return {Begin, End};
}
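
// Example (illustrative sketch, not from the upstream file): assuming
// `Buffer` is a TokenBuffer built for a parsed file and `S` is a Stmt from
// the corresponding AST, the expanded tokens making up the statement are
//   llvm::ArrayRef<syntax::Token> Toks =
//       Buffer.expandedTokens(S->getSourceRange());
// The two partition_point calls above are binary searches, so the lookup is
// logarithmic in the size of the expanded stream.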

CharSourceRange FileRange::toCharRange(const SourceManager &SM) const {
  return CharSourceRange(
      SourceRange(SM.getComposedLoc(File, Begin), SM.getComposedLoc(File, End)),
      /*IsTokenRange=*/false);
}

std::pair<const syntax::Token *, const TokenBuffer::Mapping *>
TokenBuffer::spelledForExpandedToken(const syntax::Token *Expanded) const {
  assert(Expanded);
  assert(ExpandedTokens.data() <= Expanded &&
         Expanded < ExpandedTokens.data() + ExpandedTokens.size());

  auto FileIt = Files.find(
      SourceMgr->getFileID(SourceMgr->getExpansionLoc(Expanded->location())));
  assert(FileIt != Files.end() && "no file for an expanded token");

  const MarkedFile &File = FileIt->second;

  unsigned ExpandedIndex = Expanded - ExpandedTokens.data();
  // Find the first mapping that produced tokens after \p Expanded.
  auto It = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginExpanded <= ExpandedIndex;
  });
  // Our token could only be produced by the previous mapping.
  if (It == File.Mappings.begin()) {
    // No previous mapping, no need to modify offsets.
    return {&File.SpelledTokens[ExpandedIndex - File.BeginExpanded], nullptr};
  }
  --It; // 'It' now points to the last mapping that started before our token.

  // Check if the token is part of the mapping.
  if (ExpandedIndex < It->EndExpanded)
    return {&File.SpelledTokens[It->BeginSpelled], /*Mapping=*/&*It};

  // Not part of the mapping; use the index from the previous mapping to
  // compute the corresponding spelled token.
  return {
      &File.SpelledTokens[It->EndSpelled + (ExpandedIndex - It->EndExpanded)],
      /*Mapping=*/nullptr};
}
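
// Illustration (not from the upstream file): for
//   #define ID(X) X
//   int ID(a);
// the expanded stream is `int a ;` (plus a trailing 'eof'), while the spelled
// stream of the file also contains the directive tokens and the macro call
// `ID ( a )`. The expanded `a` lies inside the mapping created for the
// `ID(a)` expansion, so it maps to the spelled `ID` token (the first token of
// that mapping) and the mapping itself is returned; `int` and `;` lie outside
// any mapping and map one-to-one onto their spelled counterparts with a null
// mapping.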

llvm::ArrayRef<syntax::Token> TokenBuffer::spelledTokens(FileID FID) const {
  auto It = Files.find(FID);
  assert(It != Files.end());
  return It->second.SpelledTokens;
}

std::string TokenBuffer::Mapping::str() const {
  return std::string(
      llvm::formatv("spelled tokens: [{0},{1}), expanded tokens: [{2},{3})",
                    BeginSpelled, EndSpelled, BeginExpanded, EndExpanded));
}

llvm::Optional<llvm::ArrayRef<syntax::Token>>
TokenBuffer::spelledForExpanded(llvm::ArrayRef<syntax::Token> Expanded) const {
  // Mapping an empty range is ambiguous in case of empty mappings at either
  // end of the range; bail out in that case.
  if (Expanded.empty())
    return llvm::None;

  // FIXME: also allow changes uniquely mapping to macro arguments.

  const syntax::Token *BeginSpelled;
  const Mapping *BeginMapping;
  std::tie(BeginSpelled, BeginMapping) =
      spelledForExpandedToken(&Expanded.front());

  const syntax::Token *LastSpelled;
  const Mapping *LastMapping;
  std::tie(LastSpelled, LastMapping) =
      spelledForExpandedToken(&Expanded.back());

  FileID FID = SourceMgr->getFileID(BeginSpelled->location());
  // FIXME: Handle multi-file changes by trying to map onto a common root.
  if (FID != SourceMgr->getFileID(LastSpelled->location()))
    return llvm::None;

  const MarkedFile &File = Files.find(FID)->second;

  // Do not allow changes that cross macro expansion boundaries.
  unsigned BeginExpanded = Expanded.begin() - ExpandedTokens.data();
  unsigned EndExpanded = Expanded.end() - ExpandedTokens.data();
  if (BeginMapping && BeginMapping->BeginExpanded < BeginExpanded)
    return llvm::None;
  if (LastMapping && EndExpanded < LastMapping->EndExpanded)
    return llvm::None;
  // All is good, return the result.
  return llvm::makeArrayRef(
      BeginMapping ? File.SpelledTokens.data() + BeginMapping->BeginSpelled
                   : BeginSpelled,
      LastMapping ? File.SpelledTokens.data() + LastMapping->EndSpelled
                  : LastSpelled + 1);
}
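
// Example (illustrative sketch, not from the upstream file): a tool that
// wants to rewrite the file text behind some expanded tokens can use this to
// check the edit is possible; `Buffer`, `SM`, `Expanded` and `edit` are
// hypothetical names:
//   if (llvm::Optional<llvm::ArrayRef<syntax::Token>> Spelled =
//           Buffer.spelledForExpanded(Expanded))
//     edit(syntax::Token::range(SM, Spelled->front(), Spelled->back()));
// llvm::None here means the range crosses a macro expansion boundary and
// cannot be rewritten as a contiguous piece of file text.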

llvm::Optional<TokenBuffer::Expansion>
TokenBuffer::expansionStartingAt(const syntax::Token *Spelled) const {
  assert(Spelled);
  assert(Spelled->location().isFileID() && "not a spelled token");
  auto FileIt = Files.find(SourceMgr->getFileID(Spelled->location()));
  assert(FileIt != Files.end() && "file not tracked by token buffer");

  auto &File = FileIt->second;
  assert(File.SpelledTokens.data() <= Spelled &&
         Spelled < (File.SpelledTokens.data() + File.SpelledTokens.size()));

  unsigned SpelledIndex = Spelled - File.SpelledTokens.data();
  auto M = llvm::partition_point(File.Mappings, [&](const Mapping &M) {
    return M.BeginSpelled < SpelledIndex;
  });
  if (M == File.Mappings.end() || M->BeginSpelled != SpelledIndex)
    return llvm::None;

  Expansion E;
  E.Spelled = llvm::makeArrayRef(File.SpelledTokens.data() + M->BeginSpelled,
                                 File.SpelledTokens.data() + M->EndSpelled);
  E.Expanded = llvm::makeArrayRef(ExpandedTokens.data() + M->BeginExpanded,
                                  ExpandedTokens.data() + M->EndExpanded);
  return E;
}
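
// Example (illustrative sketch, not from the upstream file): assuming
// `Buffer` is a TokenBuffer and `Tok` points at one of its spelled tokens,
// this checks whether `Tok` is the first token of a macro expansion:
//   if (llvm::Optional<TokenBuffer::Expansion> E =
//           Buffer.expansionStartingAt(Tok))
//     llvm::errs() << "expands to " << E->Expanded.size() << " tokens\n";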

llvm::ArrayRef<syntax::Token>
syntax::spelledTokensTouching(SourceLocation Loc,
                              llvm::ArrayRef<syntax::Token> Tokens) {
  assert(Loc.isFileID());

  auto *Right = llvm::partition_point(
      Tokens, [&](const syntax::Token &Tok) { return Tok.location() < Loc; });
  bool AcceptRight = Right != Tokens.end() && Right->location() <= Loc;
  bool AcceptLeft =
      Right != Tokens.begin() && (Right - 1)->endLocation() >= Loc;
  return llvm::makeArrayRef(Right - (AcceptLeft ? 1 : 0),
                            Right + (AcceptRight ? 1 : 0));
}
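
// Illustration (not from the upstream file): for the spelled tokens of
// `int abc;`, a location right between `abc` and `;` touches both tokens, so
// the function returns the two-token slice {`abc`, `;`}; a location in the
// middle of `abc` returns just {`abc`}.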

llvm::ArrayRef<syntax::Token>
syntax::spelledTokensTouching(SourceLocation Loc,
                              const syntax::TokenBuffer &Tokens) {
  return spelledTokensTouching(
      Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
}

const syntax::Token *
syntax::spelledIdentifierTouching(SourceLocation Loc,
                                  llvm::ArrayRef<syntax::Token> Tokens) {
  for (const syntax::Token &Tok : spelledTokensTouching(Loc, Tokens)) {
    if (Tok.kind() == tok::identifier)
      return &Tok;
  }
  return nullptr;
}

const syntax::Token *
syntax::spelledIdentifierTouching(SourceLocation Loc,
                                  const syntax::TokenBuffer &Tokens) {
  return spelledIdentifierTouching(
      Loc, Tokens.spelledTokens(Tokens.sourceManager().getFileID(Loc)));
}

std::vector<const syntax::Token *>
TokenBuffer::macroExpansions(FileID FID) const {
  auto FileIt = Files.find(FID);
  assert(FileIt != Files.end() && "file not tracked by token buffer");
  auto &File = FileIt->second;
  std::vector<const syntax::Token *> Expansions;
  auto &Spelled = File.SpelledTokens;
  for (auto Mapping : File.Mappings) {
    const syntax::Token *Token = &Spelled[Mapping.BeginSpelled];
    if (Token->kind() == tok::TokenKind::identifier)
      Expansions.push_back(Token);
  }
  return Expansions;
}
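
// Example (illustrative sketch, not from the upstream file): printing the
// names of all top-level macro invocations spelled in the main file, with
// `Buffer` a TokenBuffer and `SM` its SourceManager:
//   for (const syntax::Token *T : Buffer.macroExpansions(SM.getMainFileID()))
//     llvm::errs() << T->text(SM) << "\n";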

std::vector<syntax::Token> syntax::tokenize(FileID FID, const SourceManager &SM,
                                            const LangOptions &LO) {
  std::vector<syntax::Token> Tokens;
  IdentifierTable Identifiers(LO);
  auto AddToken = [&](clang::Token T) {
    // Fill the proper token kind for keywords, etc.
    if (T.getKind() == tok::raw_identifier && !T.needsCleaning() &&
        !T.hasUCN()) { // FIXME: support needsCleaning and hasUCN cases.
      clang::IdentifierInfo &II = Identifiers.get(T.getRawIdentifier());
      T.setIdentifierInfo(&II);
      T.setKind(II.getTokenID());
    }
    Tokens.push_back(syntax::Token(T));
  };

  Lexer L(FID, SM.getBuffer(FID), SM, LO);

  clang::Token T;
  while (!L.LexFromRawLexer(T))
    AddToken(T);
  // 'eof' is only the last token if the input is null-terminated. Never store
  // it, for consistency.
  if (T.getKind() != tok::eof)
    AddToken(T);
  return Tokens;
}
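
// Example (illustrative sketch, not from the upstream file): raw-lexing a
// file already loaded into a SourceManager `SM` with language options `LO`:
//   std::vector<syntax::Token> Raw =
//       syntax::tokenize(SM.getMainFileID(), SM, LO);
// No preprocessing happens here: macro uses and directives appear verbatim,
// and keywords get their proper kinds via the IdentifierTable above.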

/// Records information required to construct mappings for the token buffer
/// that we are collecting.
class TokenCollector::CollectPPExpansions : public PPCallbacks {
public:
  CollectPPExpansions(TokenCollector &C) : Collector(&C) {}

  /// A disabled instance stops reporting anything to the TokenCollector.
  /// This ensures that uses of the preprocessor after
  /// TokenCollector::consume() is called do not access the (possibly invalid)
  /// collector instance.
  void disable() { Collector = nullptr; }

  void MacroExpands(const clang::Token &MacroNameTok, const MacroDefinition &MD,
                    SourceRange Range, const MacroArgs *Args) override {
    if (!Collector)
      return;
    // Only record top-level expansions, not those where:
    //   - the macro use is inside a macro body,
    //   - the macro appears in an argument to another macro.
    if (!MacroNameTok.getLocation().isFileID() ||
        (LastExpansionEnd.isValid() &&
         Collector->PP.getSourceManager().isBeforeInTranslationUnit(
             Range.getBegin(), LastExpansionEnd)))
      return;
    Collector->Expansions[Range.getBegin().getRawEncoding()] = Range.getEnd();
    LastExpansionEnd = Range.getEnd();
  }
  // FIXME: handle directives like #pragma, #include, etc.
private:
  TokenCollector *Collector;
  /// Used to detect recursive macro expansions.
  SourceLocation LastExpansionEnd;
};

/// Fills in the TokenBuffer by tracing the run of a preprocessor. The
/// implementation tracks the tokens, macro expansions and directives coming
/// from the preprocessor and:
/// - for each token, figures out if it is a part of an expanded token stream,
///   spelled token stream or both. Stores the tokens appropriately.
/// - records mappings from the spelled to expanded token ranges, e.g. for
///   macro expansions.
/// FIXME: also properly record:
///   - #include directives,
///   - #pragma, #line and other PP directives,
///   - skipped pp regions,
///   - ...

TokenCollector::TokenCollector(Preprocessor &PP) : PP(PP) {
  // Collect the expanded token stream during preprocessing.
  PP.setTokenWatcher([this](const clang::Token &T) {
    if (T.isAnnotation())
      return;
    DEBUG_WITH_TYPE("collect-tokens", llvm::dbgs()
                                          << "Token: "
                                          << syntax::Token(T).dumpForTests(
                                                 this->PP.getSourceManager())
                                          << "\n");
    Expanded.push_back(syntax::Token(T));
  });
  // And locations of macro calls, to properly recover boundaries of those in
  // case of empty expansions.
  auto CB = std::make_unique<CollectPPExpansions>(*this);
  this->Collector = CB.get();
  PP.addPPCallbacks(std::move(CB));
}

/// Builds mappings and spelled tokens in the TokenBuffer based on the expanded
/// token stream.
class TokenCollector::Builder {
public:
  Builder(std::vector<syntax::Token> Expanded, PPExpansions CollectedExpansions,
          const SourceManager &SM, const LangOptions &LangOpts)
      : Result(SM), CollectedExpansions(std::move(CollectedExpansions)), SM(SM),
        LangOpts(LangOpts) {
    Result.ExpandedTokens = std::move(Expanded);
  }

  TokenBuffer build() && {
    buildSpelledTokens();

    // Walk over expanded tokens and spelled tokens in parallel, building the
    // mappings between those using source locations.
    // To correctly recover empty macro expansions, we also take locations
    // reported to PPCallbacks::MacroExpands into account as we do not have any
    // expanded tokens with source locations to guide us.

    // The 'eof' token is special: it is not part of the spelled token stream.
    // We handle it separately at the end.
    assert(!Result.ExpandedTokens.empty());
    assert(Result.ExpandedTokens.back().kind() == tok::eof);
    for (unsigned I = 0; I < Result.ExpandedTokens.size() - 1; ++I) {
      // (!) I might be updated by the following call.
      processExpandedToken(I);
    }

    // 'eof' was not handled in the loop, do it here.
    assert(SM.getMainFileID() ==
           SM.getFileID(Result.ExpandedTokens.back().location()));
    fillGapUntil(Result.Files[SM.getMainFileID()],
                 Result.ExpandedTokens.back().location(),
                 Result.ExpandedTokens.size() - 1);
    Result.Files[SM.getMainFileID()].EndExpanded = Result.ExpandedTokens.size();

    // Some files might have unaccounted spelled tokens at the end; add an
    // empty mapping for those as they did not have expanded counterparts.
    fillGapsAtEndOfFiles();

    return std::move(Result);
  }
private:
  /// Processes the next token in the expanded stream and consumes the
  /// corresponding spelled tokens, recording a mapping if needed.
  /// (!) \p I will be updated if this had to skip tokens, e.g. for macros.
  void processExpandedToken(unsigned &I) {
    auto L = Result.ExpandedTokens[I].location();
    if (L.isMacroID()) {
      processMacroExpansion(SM.getExpansionRange(L), I);
      return;
    }
    if (L.isFileID()) {
      auto FID = SM.getFileID(L);
      TokenBuffer::MarkedFile &File = Result.Files[FID];

      fillGapUntil(File, L, I);

      // Skip the token.
      assert(File.SpelledTokens[NextSpelled[FID]].location() == L &&
             "no corresponding token in the spelled stream");
      ++NextSpelled[FID];
      return;
    }
  }

  /// Skips the expanded and spelled tokens of a macro expansion that covers
  /// \p SpelledRange and adds a corresponding mapping.
  /// (!) \p I will be the index of the last token in the expansion after this
  /// function returns.
  void processMacroExpansion(CharSourceRange SpelledRange, unsigned &I) {
    auto FID = SM.getFileID(SpelledRange.getBegin());
    assert(FID == SM.getFileID(SpelledRange.getEnd()));
    TokenBuffer::MarkedFile &File = Result.Files[FID];

    fillGapUntil(File, SpelledRange.getBegin(), I);

    // Skip all expanded tokens from the same macro expansion.
    unsigned BeginExpanded = I;
    for (; I + 1 < Result.ExpandedTokens.size(); ++I) {
      auto NextL = Result.ExpandedTokens[I + 1].location();
      if (!NextL.isMacroID() ||
          SM.getExpansionLoc(NextL) != SpelledRange.getBegin())
        break;
    }
    unsigned EndExpanded = I + 1;
    consumeMapping(File, SM.getFileOffset(SpelledRange.getEnd()), BeginExpanded,
                   EndExpanded, NextSpelled[FID]);
  }

  /// Initializes TokenBuffer::Files and fills spelled tokens and expanded
  /// ranges for each of the files.
  void buildSpelledTokens() {
    for (unsigned I = 0; I < Result.ExpandedTokens.size(); ++I) {
      auto FID =
          SM.getFileID(SM.getExpansionLoc(Result.ExpandedTokens[I].location()));
      auto It = Result.Files.try_emplace(FID);
      TokenBuffer::MarkedFile &File = It.first->second;

      File.EndExpanded = I + 1;
      if (!It.second)
        continue; // we have seen this file before.

      // This is the first time we see this file.
      File.BeginExpanded = I;
      File.SpelledTokens = tokenize(FID, SM, LangOpts);
    }
  }

  void consumeEmptyMapping(TokenBuffer::MarkedFile &File, unsigned EndOffset,
                           unsigned ExpandedIndex, unsigned &SpelledIndex) {
    consumeMapping(File, EndOffset, ExpandedIndex, ExpandedIndex, SpelledIndex);
  }

  /// Consumes spelled tokens that form a macro expansion and adds an entry to
  /// the resulting token buffer.
  /// (!) SpelledIndex is updated in-place.
  void consumeMapping(TokenBuffer::MarkedFile &File, unsigned EndOffset,
                      unsigned BeginExpanded, unsigned EndExpanded,
                      unsigned &SpelledIndex) {
    // We need to record this mapping before continuing.
    unsigned MappingBegin = SpelledIndex;
    ++SpelledIndex;

    bool HitMapping =
        tryConsumeSpelledUntil(File, EndOffset + 1, SpelledIndex).hasValue();
    (void)HitMapping;
    assert(!HitMapping && "recursive macro expansion?");

    TokenBuffer::Mapping M;
    M.BeginExpanded = BeginExpanded;
    M.EndExpanded = EndExpanded;
    M.BeginSpelled = MappingBegin;
    M.EndSpelled = SpelledIndex;

    File.Mappings.push_back(M);
  }

  /// Consumes spelled tokens until location \p L is reached and adds a mapping
  /// covering the consumed tokens. The mapping will point to an empty expanded
  /// range at position \p ExpandedIndex.
  void fillGapUntil(TokenBuffer::MarkedFile &File, SourceLocation L,
                    unsigned ExpandedIndex) {
    assert(L.isFileID());
    FileID FID;
    unsigned Offset;
    std::tie(FID, Offset) = SM.getDecomposedLoc(L);

    unsigned &SpelledIndex = NextSpelled[FID];
    unsigned MappingBegin = SpelledIndex;
    while (true) {
      auto EndLoc = tryConsumeSpelledUntil(File, Offset, SpelledIndex);
      if (SpelledIndex != MappingBegin) {
        TokenBuffer::Mapping M;
        M.BeginSpelled = MappingBegin;
        M.EndSpelled = SpelledIndex;
        M.BeginExpanded = M.EndExpanded = ExpandedIndex;
        File.Mappings.push_back(M);
      }
      if (!EndLoc)
        break;
      consumeEmptyMapping(File, SM.getFileOffset(*EndLoc), ExpandedIndex,
                          SpelledIndex);

      MappingBegin = SpelledIndex;
    }
  }

  /// Consumes spelled tokens until it reaches \p Offset or a mapping boundary,
  /// i.e. the name of a macro expansion or the starting '#' token of a PP
  /// directive.
  /// (!) NextSpelled is updated in place.
  ///
  /// Returns None if \p Offset was reached, otherwise returns the end location
  /// of a mapping that starts at \p NextSpelled.
  llvm::Optional<SourceLocation>
  tryConsumeSpelledUntil(TokenBuffer::MarkedFile &File, unsigned Offset,
                         unsigned &NextSpelled) {
    for (; NextSpelled < File.SpelledTokens.size(); ++NextSpelled) {
      auto L = File.SpelledTokens[NextSpelled].location();
      if (Offset <= SM.getFileOffset(L))
        return llvm::None; // reached the offset we are looking for.
      auto Mapping = CollectedExpansions.find(L.getRawEncoding());
      if (Mapping != CollectedExpansions.end())
        return Mapping->second; // found a mapping before the offset.
    }
    return llvm::None; // no more tokens, we "reached" the offset.
  }

  /// Adds empty mappings for unconsumed spelled tokens at the end of each file.
  void fillGapsAtEndOfFiles() {
    for (auto &F : Result.Files) {
      if (F.second.SpelledTokens.empty())
        continue;
      fillGapUntil(F.second, F.second.SpelledTokens.back().endLocation(),
                   F.second.EndExpanded);
    }
  }

  TokenBuffer Result;
  /// For each file, a position of the next spelled token we will consume.
  llvm::DenseMap<FileID, unsigned> NextSpelled;
  PPExpansions CollectedExpansions;
  const SourceManager &SM;
  const LangOptions &LangOpts;
};

TokenBuffer TokenCollector::consume() && {
  PP.setTokenWatcher(nullptr);
  Collector->disable();
  return Builder(std::move(Expanded), std::move(Expansions),
                 PP.getSourceManager(), PP.getLangOpts())
      .build();
}
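
// Example (illustrative sketch, not from the upstream file) of the intended
// lifecycle, assuming a fully configured Preprocessor `PP`:
//   syntax::TokenCollector Collector(PP); // install callbacks before lexing.
//   PP.EnterMainSourceFile();
//   clang::Token Tok;
//   do {
//     PP.Lex(Tok);
//   } while (Tok.isNot(tok::eof));
//   syntax::TokenBuffer Buffer = std::move(Collector).consume();
// consume() must be called while `PP` is still alive; installing the
// collector before lexing starts ensures it sees every expanded token.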

std::string syntax::Token::str() const {
  return std::string(llvm::formatv("Token({0}, length = {1})",
                                   tok::getTokenName(kind()), length()));
}

std::string syntax::Token::dumpForTests(const SourceManager &SM) const {
  return std::string(
      llvm::formatv("{0} {1}", tok::getTokenName(kind()), text(SM)));
}

std::string TokenBuffer::dumpForTests() const {
  auto PrintToken = [this](const syntax::Token &T) -> std::string {
    if (T.kind() == tok::eof)
      return "<eof>";
    return std::string(T.text(*SourceMgr));
  };

  auto DumpTokens = [this, &PrintToken](llvm::raw_ostream &OS,
                                        llvm::ArrayRef<syntax::Token> Tokens) {
    if (Tokens.empty()) {
      OS << "<empty>";
      return;
    }
    OS << Tokens[0].text(*SourceMgr);
    for (unsigned I = 1; I < Tokens.size(); ++I) {
      if (Tokens[I].kind() == tok::eof)
        continue;
      OS << " " << PrintToken(Tokens[I]);
    }
  };

  std::string Dump;
  llvm::raw_string_ostream OS(Dump);

  OS << "expanded tokens:\n"
     << "  ";
  // (!) we do not show '<eof>'.
  DumpTokens(OS, llvm::makeArrayRef(ExpandedTokens).drop_back());
  OS << "\n";

  std::vector<FileID> Keys;
  for (const auto &F : Files)
    Keys.push_back(F.first);
  llvm::sort(Keys);

  for (FileID ID : Keys) {
    const MarkedFile &File = Files.find(ID)->second;
    auto *Entry = SourceMgr->getFileEntryForID(ID);
    if (!Entry)
      continue; // Skip builtin files.
    OS << llvm::formatv("file '{0}'\n", Entry->getName())
       << "  spelled tokens:\n"
       << "    ";
    DumpTokens(OS, File.SpelledTokens);
    OS << "\n";

    if (File.Mappings.empty()) {
      OS << "  no mappings.\n";
      continue;
    }
    OS << "  mappings:\n";
    for (auto &M : File.Mappings) {
      OS << llvm::formatv(
          "    ['{0}'_{1}, '{2}'_{3}) => ['{4}'_{5}, '{6}'_{7})\n",
          PrintToken(File.SpelledTokens[M.BeginSpelled]), M.BeginSpelled,
          M.EndSpelled == File.SpelledTokens.size()
              ? "<eof>"
              : PrintToken(File.SpelledTokens[M.EndSpelled]),
          M.EndSpelled, PrintToken(ExpandedTokens[M.BeginExpanded]),
          M.BeginExpanded, PrintToken(ExpandedTokens[M.EndExpanded]),
          M.EndExpanded);
    }
  }
  return OS.str();
}