Commit c2821eb

vasily-kirichenko authored and KevinRansom committed

Fix lexer cache (dotnet#2158)

* revert "Use Roslyn line numbers only in lexer cache" dotnet#2090
* fix OutOfRangeException in getCachedSourceLineData
* fixed: lexer cache does not work at all
* fix finding start line for relexing in getCachedSourceLineData

1 parent 9e4cb09; commit c2821eb
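The scheme this commit repairs works as follows: per-line classification data is cached together with the lexer state at the end of each line; to re-lex from the middle of a file, the code walks backwards to the last line whose cache entry is still valid and seeds the lexer with the end-of-line state of the line before the rescan start. Below is a minimal, self-contained sketch of that idea with invented types, and a hash comparison standing in for the real SourceLineData.IsValid check; it is an illustration, not the FSharp.Editor code itself.

// Hypothetical, simplified model of the per-line lexer cache (names invented).
type CachedLine = { Hash: int; LexStateAtEndOfLine: int64 }

// Walk backwards from startLine to the last line whose cached entry still matches the
// current text (modelled here as a hash comparison). The min-clamp mirrors the
// OutOfRangeException fix mentioned in the commit message.
let findScanStart (cache: CachedLine option []) (currentHashes: int []) (startLine: int) =
    let mutable i = min (cache.Length - 1) startLine
    while i > 0 && (match cache.[i] with Some data -> data.Hash <> currentHashes.[i] | None -> true) do
        i <- i - 1
    i

// Seed the lexer with the end-of-line state of the line *before* the rescan start;
// line 0 always starts from state 0L. Using scanStartLine - 1 rather than scanStartLine
// itself is the "start line for relexing" fix described above.
let initialLexState (cache: CachedLine option []) (scanStartLine: int) =
    if scanStartLine = 0 then 0L
    else cache.[scanStartLine - 1].Value.LexStateAtEndOfLine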

File tree

2 files changed: +73 −75 lines changed


vsintegration/src/FSharp.Editor/Common/CommonHelpers.fs

Lines changed: 72 additions & 74 deletions
@@ -2,6 +2,8 @@
 
 namespace Microsoft.VisualStudio.FSharp.Editor
 
+#nowarn "1182"
+
 open System
 open System.Collections.Generic
 open System.Threading
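The #nowarn "1182" added above suppresses F# warning FS1182 (the opt-in "value is unused" warning) for this file, presumably because the change introduces bindings that are not referenced, such as the sourceTokenizer binding added to getSymbolAtPosition later in this diff. A tiny illustration of what the directive silences (hypothetical code, not from the file):

#nowarn "1182"      // same directive the diff adds to CommonHelpers.fs

let demo () =
    // With --warnon:1182 enabled and without the directive, this unused binding
    // would be reported as warning FS1182.
    let unusedValue = 42
    ()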
@@ -73,13 +75,6 @@ module internal CommonHelpers =
                     data.[i] <- None
                     i <- i + 1
 
-        /// Go backwards to find the last cached scanned line that is valid.
-        member x.GetLastValidCachedLine(startLine: int, sourceLines: TextLineCollection) : int =
-            let mutable i = startLine
-            while i > 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do
-                i <- i - 1
-            i
-
     let private dataCache = ConditionalWeakTable<DocumentId, SourceTextData>()
 
     let internal compilerTokenToRoslynToken (colorKind: FSharpTokenColorKind) : string =
@@ -133,72 +128,66 @@ module internal CommonHelpers =
 
         SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, List.ofSeq tokens)
 
-    let private getSourceLineDatas (documentKey: DocumentId, sourceText: SourceText, startLine: int, endLine: int, fileName: string option, defines: string list,
-                                    cancellationToken: CancellationToken) : ResizeArray<SourceLineData> =
-        let sourceTokenizer = FSharpSourceTokenizer(defines, fileName)
-        let lines = sourceText.Lines
-        // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines)
-        let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count))
-        let scanStartLine = sourceTextData.GetLastValidCachedLine(startLine, lines)
-
-        // Rescan the lines if necessary and report the information
-        let result = ResizeArray()
-        let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine
-
-        for i = scanStartLine to endLine do
-            cancellationToken.ThrowIfCancellationRequested()
-            let textLine = lines.[i]
-            let lineContents = textLine.Text.ToString(textLine.Span)
-
-            let lineData =
-                // We can reuse the old data when
-                // 1. the line starts at the same overall position
-                // 2. the hash codes match
-                // 3. the start-of-line lex states are the same
-                match sourceTextData.[i] with
-                | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState ->
-                    data
-                | _ ->
-                    // Otherwise, we recompute
-                    let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState)
-                    sourceTextData.[i] <- Some newData
-                    newData
-
-            lexState <- lineData.LexStateAtEndOfLine
-
-            if startLine <= i then
-                result.Add(lineData)
-
-        // If necessary, invalidate all subsequent lines after endLine
-        if endLine < lines.Count - 1 then
-            match sourceTextData.[endLine + 1] with
-            | Some data ->
-                if data.LexStateAtStartOfLine <> lexState then
-                    sourceTextData.ClearFrom(endLine + 1)
-            | None -> ()
-
-        result
-
     let getColorizationData (documentKey: DocumentId, sourceText: SourceText, textSpan: TextSpan, fileName: string option, defines: string list,
                              cancellationToken: CancellationToken) : List<ClassifiedSpan> =
-        try
-            let lines = sourceText.Lines
-            let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber
-            let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber
-
-            // Rescan the lines if necessary and report the information
-            let result = new List<ClassifiedSpan>()
-            for lineData in getSourceLineDatas(documentKey, sourceText, startLine, endLine, fileName, defines, cancellationToken) do
-                result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token ->
-                    textSpan.Contains(token.TextSpan.Start) ||
-                    textSpan.Contains(token.TextSpan.End - 1) ||
-                    (token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End)))
-            result
-        with
-        | :? System.OperationCanceledException -> reraise()
-        | ex ->
-            Assert.Exception(ex)
-            List<ClassifiedSpan>()
+        try
+            let sourceTokenizer = FSharpSourceTokenizer(defines, fileName)
+            let lines = sourceText.Lines
+            // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines)
+            let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count))
+
+            let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber
+            let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber
+            // Go backwards to find the last cached scanned line that is valid
+            let scanStartLine =
+                let mutable i = startLine
+                while i > 0 && (match sourceTextData.[i] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do
+                    i <- i - 1
+                i
+            // Rescan the lines if necessary and report the information
+            let result = new List<ClassifiedSpan>()
+            let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine - 1].Value.LexStateAtEndOfLine
+
+            for i = scanStartLine to endLine do
+                cancellationToken.ThrowIfCancellationRequested()
+                let textLine = lines.[i]
+                let lineContents = textLine.Text.ToString(textLine.Span)
+
+                let lineData =
+                    // We can reuse the old data when
+                    // 1. the line starts at the same overall position
+                    // 2. the hash codes match
+                    // 3. the start-of-line lex states are the same
+                    match sourceTextData.[i] with
+                    | Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState ->
+                        data
+                    | _ ->
+                        // Otherwise, we recompute
+                        let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState)
+                        sourceTextData.[i] <- Some newData
+                        newData
+
+                lexState <- lineData.LexStateAtEndOfLine
+
+                if startLine <= i then
+                    result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token ->
+                        textSpan.Contains(token.TextSpan.Start) ||
+                        textSpan.Contains(token.TextSpan.End - 1) ||
+                        (token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End)))
+
+            // If necessary, invalidate all subsequent lines after endLine
+            if endLine < lines.Count - 1 then
+                match sourceTextData.[endLine + 1] with
+                | Some data ->
+                    if data.LexStateAtStartOfLine <> lexState then
+                        sourceTextData.ClearFrom(endLine + 1)
+                | None -> ()
+            result
+        with
+        | :? System.OperationCanceledException -> reraise()
+        | ex ->
+            Assert.Exception(ex)
+            List<ClassifiedSpan>()
 
     type private DraftToken =
         { Kind: LexerSymbolKind
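The AddRange filter in the rewritten getColorizationData keeps a classified token when it starts inside the requested span, ends inside it, or completely encloses it. A small standalone predicate over plain int offsets (a hypothetical helper, not part of the real file) makes the three cases explicit:

let overlapsRequestedSpan (spanStart: int) (spanEnd: int) (tokenStart: int) (tokenEnd: int) =
    // 'contains' mirrors Roslyn's TextSpan.Contains(position): start inclusive, end exclusive.
    let contains pos = spanStart <= pos && pos < spanEnd
    contains tokenStart ||                               // token starts inside the span
    contains (tokenEnd - 1) ||                           // token ends inside the span
    (tokenStart <= spanStart && spanEnd <= tokenEnd)     // token encloses the span

// For example, overlapsRequestedSpan 10 12 5 20 is true: the token [5, 20) encloses [10, 12).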
@@ -328,14 +317,22 @@ module internal CommonHelpers =
     let private getCachedSourceLineData (documentKey: DocumentId, sourceText: SourceText, position: int, fileName: string, defines: string list) =
        let textLine = sourceText.Lines.GetLineFromPosition(position)
        let textLinePos = sourceText.Lines.GetLinePosition(position)
-       let lineNumber = textLinePos.Line
+       let lineNumber = textLinePos.Line + 1 // FCS line number
        let sourceTokenizer = FSharpSourceTokenizer(defines, Some fileName)
        let lines = sourceText.Lines
        // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines)
        let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count))
        // Go backwards to find the last cached scanned line that is valid
-       let scanStartLine = sourceTextData.GetLastValidCachedLine(lineNumber, lines)
-       let lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine
+       let scanStartLine =
+           let mutable i = min (lines.Count - 1) lineNumber
+           while i > 0 &&
+                 (match sourceTextData.[i] with
+                  | Some data -> not (data.IsValid(lines.[i]))
+                  | None -> true
+                 ) do
+               i <- i - 1
+           i
+       let lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine - 1].Value.LexStateAtEndOfLine
        let lineContents = textLine.Text.ToString(textLine.Span)
 
        // We can reuse the old data when
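Two details in this hunk are worth spelling out. Roslyn's LinePosition.Line is 0-based while FCS line numbers are 1-based, which is what the "+ 1 // FCS line number" change encodes; and because that 1-based lineNumber can now run one past the last 0-based index, the backward scan clamps its start with min (lines.Count - 1) lineNumber before indexing the cache, which appears to be the OutOfRangeException fix named in the commit message. A pair of hypothetical helpers (not in the real file) stating the conversion:

// 0-based Roslyn line index <-> 1-based FCS line number.
let roslynToFcsLine (roslynLine: int) = roslynLine + 1
let fcsToRoslynLine (fcsLine: int) = fcsLine - 1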
@@ -363,6 +360,7 @@ module internal CommonHelpers =
     let getSymbolAtPosition (documentKey: DocumentId, sourceText: SourceText, position: int, fileName: string, defines: string list, lookupKind: SymbolLookupKind) : LexerSymbol option =
        try
            let lineData, textLinePos, lineContents = getCachedSourceLineData(documentKey, sourceText, position, fileName, defines)
+           let sourceTokenizer = FSharpSourceTokenizer(defines, Some fileName)
            getSymbolFromTokens(fileName, lineData.Tokens, textLinePos, lineContents, lookupKind)
        with
        | :? System.OperationCanceledException -> reraise()

vsintegration/src/FSharp.Editor/FSharp.Editor.fsproj

Lines changed: 1 addition & 1 deletion
@@ -34,8 +34,8 @@
     <Compile Include="Common\Pervasive.fs" />
     <Compile Include="Common\CommonConstants.fs" />
     <Compile Include="Common\CommonRoslynHelpers.fs" />
-    <Compile Include="Common\CommonHelpers.fs" />
     <Compile Include="Common\Logging.fs" />
+    <Compile Include="Common\CommonHelpers.fs" />
     <Compile Include="Common\ContentType.fs" />
     <Compile Include="Common\LanguageService.fs" />
     <Compile Include="Common\SymbolHelpers.fs" />

0 commit comments

