|
2 | 2 |
|
3 | 3 | namespace Microsoft.VisualStudio.FSharp.Editor
4 | 4 |
|
| 5 | +#nowarn "1182"
| 6 | + |
5 | 7 | open System
6 | 8 | open System.Collections.Generic
7 | 9 | open System.Threading
@@ -73,13 +75,6 @@ module internal CommonHelpers = |
73 | 75 | data.[i] <- None
74 | 76 | i <- i + 1
75 | 77 |
|
76 | | -/// Go backwards to find the last cached scanned line that is valid.
77 | | -member x.GetLastValidCachedLine(startLine: int, sourceLines: TextLineCollection) : int =
78 | | -let mutable i = startLine
79 | | -while i > 0 && (match x.[i] with Some data -> not (data.IsValid(sourceLines.[i])) | None -> true) do
80 | | - i <- i - 1
81 | | - i
82 | | -
83 | 78 | let private dataCache = ConditionalWeakTable<DocumentId, SourceTextData>()
84 | 79 |
|
85 | 80 | let internal compilerTokenToRoslynToken(colorKind: FSharpTokenColorKind) : string =
@@ -133,72 +128,66 @@ module internal CommonHelpers = |
133 | 128 |
|
134 | 129 | SourceLineData(textLine.Start, lexState, previousLexState.Value, lineContents.GetHashCode(), classifiedSpans, List.ofSeq tokens) |
135 | 130 |
|
136 | | -let private getSourceLineDatas(documentKey: DocumentId, sourceText: SourceText, startLine: int, endLine: int, fileName: string option, defines: string list,
137 | | -cancellationToken: CancellationToken) : ResizeArray<SourceLineData> =
138 | | -let sourceTokenizer = FSharpSourceTokenizer(defines, fileName)
139 | | -let lines = sourceText.Lines
140 | | -// We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines)
141 | | -let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count))
142 | | -let scanStartLine = sourceTextData.GetLastValidCachedLine(startLine, lines)
143 | | -
144 | | -// Rescan the lines if necessary and report the information
145 | | -let result = ResizeArray()
146 | | -let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine
147 | | -
148 | | -for i = scanStartLine to endLine do
149 | | - cancellationToken.ThrowIfCancellationRequested()
150 | | -let textLine = lines.[i]
151 | | -let lineContents = textLine.Text.ToString(textLine.Span)
152 | | -
153 | | -let lineData =
154 | | -// We can reuse the old data when
155 | | -// 1. the line starts at the same overall position
156 | | -// 2. the hash codes match
157 | | -// 3. the start-of-line lex states are the same
158 | | -match sourceTextData.[i] with
159 | | -| Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState ->
160 | | - data
161 | | -| _ ->
162 | | -// Otherwise, we recompute
163 | | -let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState)
164 | | - sourceTextData.[i] <- Some newData
165 | | - newData
166 | | -
167 | | - lexState <- lineData.LexStateAtEndOfLine
168 | | -
169 | | -if startLine <= i then
170 | | - result.Add(lineData)
171 | | -
172 | | -// If necessary, invalidate all subsequent lines after endLine
173 | | -if endLine < lines.Count - 1 then
174 | | -match sourceTextData.[endLine + 1] with
175 | | -| Some data ->
176 | | -if data.LexStateAtStartOfLine <> lexState then
177 | | - sourceTextData.ClearFrom(endLine + 1)
178 | | -| None -> ()
179 | | -
180 | | - result
181 | | -
182 | 183 | let getColorizationData(documentKey: DocumentId, sourceText: SourceText, textSpan: TextSpan, fileName: string option, defines: string list,
183 | 132 | cancellationToken: CancellationToken) : List<ClassifiedSpan> =
184 | | -try
185 | | -let lines = sourceText.Lines
186 | | -let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber
187 | | -let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber
188 | | -
189 | | -// Rescan the lines if necessary and report the information
190 | | -let result = new List<ClassifiedSpan>()
191 | | -for lineData in getSourceLineDatas(documentKey, sourceText, startLine, endLine, fileName, defines, cancellationToken) do
192 | | - result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token ->
193 | | - textSpan.Contains(token.TextSpan.Start) ||
194 | | - textSpan.Contains(token.TextSpan.End - 1) ||
195 | | -(token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End)))
196 | | - result
197 | | -with
198 | | -| :? System.OperationCanceledException -> reraise()
199 | | -| ex ->
200 | | - Assert.Exception(ex)
201 | | - List<ClassifiedSpan>()
| 133 | +try
| 134 | +let sourceTokenizer = FSharpSourceTokenizer(defines, fileName)
| 135 | +let lines = sourceText.Lines
| 136 | +// We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines)
| 137 | +let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count))
| 138 | +
| 139 | +let startLine = lines.GetLineFromPosition(textSpan.Start).LineNumber
| 140 | +let endLine = lines.GetLineFromPosition(textSpan.End).LineNumber
| 141 | +// Go backwards to find the last cached scanned line that is valid
| 142 | +let scanStartLine =
| 143 | +let mutable i = startLine
| 144 | +while i > 0 && (match sourceTextData.[i] with Some data -> not (data.IsValid(lines.[i])) | None -> true) do
| 145 | + i <- i - 1
| 146 | + i
| 147 | +// Rescan the lines if necessary and report the information
| 148 | +let result = new List<ClassifiedSpan>()
| 149 | +let mutable lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine - 1].Value.LexStateAtEndOfLine
| 150 | +
| 151 | +for i = scanStartLine to endLine do
| 152 | + cancellationToken.ThrowIfCancellationRequested()
| 153 | +let textLine = lines.[i]
| 154 | +let lineContents = textLine.Text.ToString(textLine.Span)
| 155 | +
| 156 | +let lineData =
| 157 | +// We can reuse the old data when
| 158 | +// 1. the line starts at the same overall position
| 159 | +// 2. the hash codes match
| 160 | +// 3. the start-of-line lex states are the same
| 161 | +match sourceTextData.[i] with
| 162 | +| Some data when data.IsValid(textLine) && data.LexStateAtStartOfLine = lexState ->
| 163 | + data
| 164 | +| _ ->
| 165 | +// Otherwise, we recompute
| 166 | +let newData = scanSourceLine(sourceTokenizer, textLine, lineContents, lexState)
| 167 | + sourceTextData.[i] <- Some newData
| 168 | + newData
| 169 | +
| 170 | + lexState <- lineData.LexStateAtEndOfLine
| 171 | +
| 172 | +if startLine <= i then
| 173 | + result.AddRange(lineData.ClassifiedSpans |> Seq.filter(fun token ->
| 174 | + textSpan.Contains(token.TextSpan.Start) ||
| 175 | + textSpan.Contains(token.TextSpan.End - 1) ||
| 176 | +(token.TextSpan.Start <= textSpan.Start && textSpan.End <= token.TextSpan.End)))
| 177 | +
| 178 | +// If necessary, invalidate all subsequent lines after endLine
| 179 | +if endLine < lines.Count - 1 then
| 180 | +match sourceTextData.[endLine + 1] with
| 181 | +| Some data ->
| 182 | +if data.LexStateAtStartOfLine <> lexState then
| 183 | + sourceTextData.ClearFrom(endLine + 1)
| 184 | +| None -> ()
| 185 | + result
| 186 | +with
| 187 | +| :? System.OperationCanceledException -> reraise()
| 188 | +| ex ->
| 189 | + Assert.Exception(ex)
| 190 | + List<ClassifiedSpan>()
202 | 191 |
|
203 | 192 | type private DraftToken =
204 | 193 | { Kind: LexerSymbolKind
@@ -328,14 +317,22 @@ module internal CommonHelpers = |
328 | 317 | let private getCachedSourceLineData(documentKey: DocumentId, sourceText: SourceText, position: int, fileName: string, defines: string list) =
329 | 318 | let textLine = sourceText.Lines.GetLineFromPosition(position)
330 | 319 | let textLinePos = sourceText.Lines.GetLinePosition(position)
331 | | -let lineNumber = textLinePos.Line
| 320 | +let lineNumber = textLinePos.Line + 1 // FCS line number
332 | 321 | let sourceTokenizer = FSharpSourceTokenizer(defines, Some fileName)
333 | 322 | let lines = sourceText.Lines
334 | 323 | // We keep incremental data per-document. When text changes we correlate text line-by-line (by hash codes of lines)
335 | 324 | let sourceTextData = dataCache.GetValue(documentKey, fun key -> SourceTextData(lines.Count))
336 | 325 | // Go backwards to find the last cached scanned line that is valid
337 | | -let scanStartLine = sourceTextData.GetLastValidCachedLine(lineNumber, lines)
338 | | -let lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine].Value.LexStateAtEndOfLine
| 326 | +let scanStartLine =
| 327 | +let mutable i = min (lines.Count - 1) lineNumber
| 328 | +while i > 0 &&
| 329 | +(match sourceTextData.[i] with
| 330 | +| Some data -> not (data.IsValid(lines.[i]))
| 331 | +| None -> true
| 332 | +) do
| 333 | + i <- i - 1
| 334 | + i
| 335 | +let lexState = if scanStartLine = 0 then 0L else sourceTextData.[scanStartLine - 1].Value.LexStateAtEndOfLine
339 | 336 | let lineContents = textLine.Text.ToString(textLine.Span)
340 | 337 |
|
341 | 338 | // We can reuse the old data when |
@@ -363,6 +360,7 @@ module internal CommonHelpers = |
363 | 360 | let getSymbolAtPosition(documentKey: DocumentId, sourceText: SourceText, position: int, fileName: string, defines: string list, lookupKind: SymbolLookupKind) : LexerSymbol option =
364 | 361 | try
365 | 362 | let lineData, textLinePos, lineContents = getCachedSourceLineData(documentKey, sourceText, position, fileName, defines)
| 363 | +let sourceTokenizer = FSharpSourceTokenizer(defines, Some fileName)
366 | 364 | getSymbolFromTokens(fileName, lineData.Tokens, textLinePos, lineContents, lookupKind)
367 | 365 | with
368 | 366 | | :? System.OperationCanceledException -> reraise()
|