Merge from vscode 8e0f348413f4f616c23a88ae30030efa85811973 (#6381)
* Merge from vscode 8e0f348413f4f616c23a88ae30030efa85811973
* disable strict null check
@@ -177,22 +177,26 @@ export function guessIndentation(source: ITextBuffer, defaultTabSize: number, de
}

let tabSize = defaultTabSize;
let tabSizeScore = (insertSpaces ? 0 : 0.1 * linesCount);

// console.log("score threshold: " + tabSizeScore);
// Guess tabSize only if inserting spaces...
if (insertSpaces) {
let tabSizeScore = (insertSpaces ? 0 : 0.1 * linesCount);

ALLOWED_TAB_SIZE_GUESSES.forEach((possibleTabSize) => {
let possibleTabSizeScore = spacesDiffCount[possibleTabSize];
if (possibleTabSizeScore > tabSizeScore) {
tabSizeScore = possibleTabSizeScore;
tabSize = possibleTabSize;
// console.log("score threshold: " + tabSizeScore);

ALLOWED_TAB_SIZE_GUESSES.forEach((possibleTabSize) => {
let possibleTabSizeScore = spacesDiffCount[possibleTabSize];
if (possibleTabSizeScore > tabSizeScore) {
tabSizeScore = possibleTabSizeScore;
tabSize = possibleTabSize;
}
});

// Let a tabSize of 2 win even if it is not the maximum
// (only in case 4 was guessed)
if (tabSize === 4 && spacesDiffCount[4] > 0 && spacesDiffCount[2] > 0 && spacesDiffCount[2] >= spacesDiffCount[4] / 2) {
tabSize = 2;
}
});

// Let a tabSize of 2 win even if it is not the maximum
// (only in case 4 was guessed)
if (tabSize === 4 && spacesDiffCount[4] > 0 && spacesDiffCount[2] > 0 && spacesDiffCount[2] >= spacesDiffCount[4] / 2) {
tabSize = 2;
}
@@ -611,25 +611,25 @@ export class PieceTreeBase {
let resultLen = 0;
const searcher = new Searcher(searchData.wordSeparators, searchData.regex);

let startPostion = this.nodeAt2(searchRange.startLineNumber, searchRange.startColumn);
if (startPostion === null) {
let startPosition = this.nodeAt2(searchRange.startLineNumber, searchRange.startColumn);
if (startPosition === null) {
return [];
}
let endPosition = this.nodeAt2(searchRange.endLineNumber, searchRange.endColumn);
if (endPosition === null) {
return [];
}
let start = this.positionInBuffer(startPostion.node, startPostion.remainder);
let start = this.positionInBuffer(startPosition.node, startPosition.remainder);
let end = this.positionInBuffer(endPosition.node, endPosition.remainder);

if (startPostion.node === endPosition.node) {
this.findMatchesInNode(startPostion.node, searcher, searchRange.startLineNumber, searchRange.startColumn, start, end, searchData, captureMatches, limitResultCount, resultLen, result);
if (startPosition.node === endPosition.node) {
this.findMatchesInNode(startPosition.node, searcher, searchRange.startLineNumber, searchRange.startColumn, start, end, searchData, captureMatches, limitResultCount, resultLen, result);
return result;
}

let startLineNumber = searchRange.startLineNumber;

let currentNode = startPostion.node;
let currentNode = startPosition.node;
while (currentNode !== endPosition.node) {
let lineBreakCnt = this.getLineFeedCnt(currentNode.piece.bufferIndex, start, currentNode.piece.end);

@@ -663,9 +663,9 @@ export class PieceTreeBase {
}

startLineNumber++;
startPostion = this.nodeAt2(startLineNumber, 1);
currentNode = startPostion.node;
start = this.positionInBuffer(startPostion.node, startPostion.remainder);
startPosition = this.nodeAt2(startLineNumber, 1);
currentNode = startPosition.node;
start = this.positionInBuffer(startPosition.node, startPosition.remainder);
}

if (startLineNumber === searchRange.endLineNumber) {
@@ -8,7 +8,6 @@ import { onUnexpectedError } from 'vs/base/common/errors';
import { Emitter, Event } from 'vs/base/common/event';
import { IMarkdownString } from 'vs/base/common/htmlContent';
import { Disposable, IDisposable } from 'vs/base/common/lifecycle';
import { StopWatch } from 'vs/base/common/stopwatch';
import * as strings from 'vs/base/common/strings';
import { URI } from 'vs/base/common/uri';
import { EDITOR_MODEL_DEFAULTS } from 'vs/editor/common/config/editorOptions';
@@ -23,9 +22,9 @@ import { IntervalNode, IntervalTree, getNodeIsInOverviewRuler, recomputeMaxEnd }
import { PieceTreeTextBufferBuilder } from 'vs/editor/common/model/pieceTreeTextBuffer/pieceTreeTextBufferBuilder';
import { IModelContentChangedEvent, IModelDecorationsChangedEvent, IModelLanguageChangedEvent, IModelLanguageConfigurationChangedEvent, IModelOptionsChangedEvent, IModelTokensChangedEvent, InternalModelContentChangeEvent, ModelRawChange, ModelRawContentChangedEvent, ModelRawEOLChanged, ModelRawFlush, ModelRawLineChanged, ModelRawLinesDeleted, ModelRawLinesInserted } from 'vs/editor/common/model/textModelEvents';
import { SearchData, SearchParams, TextModelSearch } from 'vs/editor/common/model/textModelSearch';
import { ModelLinesTokens, ModelTokensChangedEventBuilder } from 'vs/editor/common/model/textModelTokens';
import { TextModelTokenization, countEOL } from 'vs/editor/common/model/textModelTokens';
import { getWordAtText } from 'vs/editor/common/model/wordHelper';
import { IState, LanguageId, LanguageIdentifier, TokenizationRegistry, FormattingOptions } from 'vs/editor/common/modes';
import { LanguageId, LanguageIdentifier, FormattingOptions } from 'vs/editor/common/modes';
import { LanguageConfigurationRegistry } from 'vs/editor/common/modes/languageConfigurationRegistry';
import { NULL_LANGUAGE_IDENTIFIER } from 'vs/editor/common/modes/nullMode';
import { ignoreBracketsInToken } from 'vs/editor/common/modes/supports';
@@ -33,8 +32,8 @@ import { BracketsUtils, RichEditBracket, RichEditBrackets } from 'vs/editor/comm
import { ITheme, ThemeColor } from 'vs/platform/theme/common/themeService';
import { withUndefinedAsNull } from 'vs/base/common/types';
import { VSBufferReadableStream, VSBuffer } from 'vs/base/common/buffer';

const CHEAP_TOKENIZATION_LENGTH_LIMIT = 2048;
import { TokensStore, MultilineTokens } from 'vs/editor/common/model/tokensStore';
import { Color } from 'vs/base/common/color';

function createTextBufferBuilder() {
return new PieceTreeTextBufferBuilder();
@@ -235,6 +234,9 @@ export class TextModel extends Disposable implements model.ITextModel {
private readonly _onDidChangeOptions: Emitter<IModelOptionsChangedEvent> = this._register(new Emitter<IModelOptionsChangedEvent>());
public readonly onDidChangeOptions: Event<IModelOptionsChangedEvent> = this._onDidChangeOptions.event;

private readonly _onDidChangeAttached: Emitter<void> = this._register(new Emitter<void>());
public readonly onDidChangeAttached: Event<void> = this._onDidChangeAttached.event;

private readonly _eventEmitter: DidChangeContentEmitter = this._register(new DidChangeContentEmitter());
public onDidChangeRawContentFast(listener: (e: ModelRawContentChangedEvent) => void): IDisposable {
return this._eventEmitter.fastEvent((e: InternalModelContentChangeEvent) => listener(e.rawContentChangedEvent));
@@ -242,6 +244,9 @@ export class TextModel extends Disposable implements model.ITextModel {
public onDidChangeRawContent(listener: (e: ModelRawContentChangedEvent) => void): IDisposable {
return this._eventEmitter.slowEvent((e: InternalModelContentChangeEvent) => listener(e.rawContentChangedEvent));
}
public onDidChangeContentFast(listener: (e: IModelContentChangedEvent) => void): IDisposable {
return this._eventEmitter.fastEvent((e: InternalModelContentChangeEvent) => listener(e.contentChangedEvent));
}
public onDidChangeContent(listener: (e: IModelContentChangedEvent) => void): IDisposable {
return this._eventEmitter.slowEvent((e: InternalModelContentChangeEvent) => listener(e.contentChangedEvent));
}
@@ -284,10 +289,9 @@ export class TextModel extends Disposable implements model.ITextModel {

//#region Tokenization
private _languageIdentifier: LanguageIdentifier;
private readonly _tokenizationListener: IDisposable;
private readonly _languageRegistryListener: IDisposable;
private _revalidateTokensTimeout: any;
/*private*/_tokens: ModelLinesTokens;
private readonly _tokens: TokensStore;
private readonly _tokenization: TextModelTokenization;
//#endregion

constructor(source: string | model.ITextBufferFactory, creationOptions: model.ITextModelCreationOptions, languageIdentifier: LanguageIdentifier | null, associatedResource: URI | null = null) {
@@ -330,31 +334,12 @@ export class TextModel extends Disposable implements model.ITextModel {
this._isDisposing = false;

this._languageIdentifier = languageIdentifier || NULL_LANGUAGE_IDENTIFIER;
this._tokenizationListener = TokenizationRegistry.onDidChange((e) => {
if (e.changedLanguages.indexOf(this._languageIdentifier.language) === -1) {
return;
}

this._resetTokenizationState();
this.emitModelTokensChangedEvent({
tokenizationSupportChanged: true,
ranges: [{
fromLineNumber: 1,
toLineNumber: this.getLineCount()
}]
});

if (this._shouldAutoTokenize()) {
this._warmUpTokens();
}
});
this._revalidateTokensTimeout = -1;
this._languageRegistryListener = LanguageConfigurationRegistry.onDidChange((e) => {
if (e.languageIdentifier.id === this._languageIdentifier.id) {
this._onDidChangeLanguageConfiguration.fire({});
}
});
this._resetTokenizationState();

this._instanceId = singleLetter(MODEL_ID);
this._lastDecorationId = 0;
@@ -365,16 +350,17 @@ export class TextModel extends Disposable implements model.ITextModel {
this._isUndoing = false;
this._isRedoing = false;
this._trimAutoWhitespaceLines = null;

this._tokens = new TokensStore();
this._tokenization = new TextModelTokenization(this);
}

public dispose(): void {
this._isDisposing = true;
this._onWillDispose.fire();
this._tokenizationListener.dispose();
this._languageRegistryListener.dispose();
this._clearTimers();
this._tokenization.dispose();
this._isDisposed = true;
// Null out members, such that any use of a disposed model will throw exceptions sooner rather than later
super.dispose();
this._isDisposing = false;
}
@@ -439,8 +425,8 @@ export class TextModel extends Disposable implements model.ITextModel {
this._buffer = textBuffer;
this._increaseVersionId();

// Cancel tokenization, clear all tokens and begin tokenizing
this._resetTokenizationState();
// Flush all tokens
this._tokens.flush();

// Destroy all my decorations
this._decorations = Object.create(null);
@@ -524,36 +510,18 @@ export class TextModel extends Disposable implements model.ITextModel {
}
}

private _resetTokenizationState(): void {
this._clearTimers();
let tokenizationSupport = (
this._isTooLargeForTokenization
? null
: TokenizationRegistry.get(this._languageIdentifier.language)
);
this._tokens = new ModelLinesTokens(this._languageIdentifier, tokenizationSupport);
this._beginBackgroundTokenization();
}

private _clearTimers(): void {
if (this._revalidateTokensTimeout !== -1) {
clearTimeout(this._revalidateTokensTimeout);
this._revalidateTokensTimeout = -1;
}
}

public onBeforeAttached(): void {
this._attachedEditorCount++;
// Warm up tokens for the editor
this._warmUpTokens();
if (this._attachedEditorCount === 1) {
this._onDidChangeAttached.fire(undefined);
}
}

public onBeforeDetached(): void {
this._attachedEditorCount--;
}

private _shouldAutoTokenize(): boolean {
return this.isAttachedToEditor();
if (this._attachedEditorCount === 0) {
this._onDidChangeAttached.fire(undefined);
}
}

public isAttachedToEditor(): boolean {
@@ -1292,36 +1260,6 @@ export class TextModel extends Disposable implements model.ITextModel {
}
}

private static _eolCount(text: string): [number, number] {
let eolCount = 0;
let firstLineLength = 0;
for (let i = 0, len = text.length; i < len; i++) {
const chr = text.charCodeAt(i);

if (chr === CharCode.CarriageReturn) {
if (eolCount === 0) {
firstLineLength = i;
}
eolCount++;
if (i + 1 < len && text.charCodeAt(i + 1) === CharCode.LineFeed) {
// \r\n... case
i++; // skip \n
} else {
// \r... case
}
} else if (chr === CharCode.LineFeed) {
if (eolCount === 0) {
firstLineLength = i;
}
eolCount++;
}
}
if (eolCount === 0) {
firstLineLength = text.length;
}
return [eolCount, firstLineLength];
}

private _applyEdits(rawOperations: model.IIdentifiedSingleEditOperation[]): model.IIdentifiedSingleEditOperation[] {
for (let i = 0, len = rawOperations.length; i < len; i++) {
rawOperations[i].range = this.validateRange(rawOperations[i].range);
@@ -1340,13 +1278,8 @@ export class TextModel extends Disposable implements model.ITextModel {
let lineCount = oldLineCount;
for (let i = 0, len = contentChanges.length; i < len; i++) {
const change = contentChanges[i];
const [eolCount, firstLineLength] = TextModel._eolCount(change.text);
try {
this._tokens.applyEdits(change.range, eolCount, firstLineLength);
} catch (err) {
// emergency recovery => reset tokens
this._tokens = new ModelLinesTokens(this._tokens.languageIdentifier, this._tokens.tokenizationSupport);
}
const [eolCount, firstLineLength] = countEOL(change.text);
this._tokens.applyEdits(change.range, eolCount, firstLineLength);
this._onDidChangeDecorations.fire();
this._decorationsTree.acceptReplace(change.rangeOffset, change.rangeLength, change.text.length, change.forceMoveMarkers);

@@ -1407,10 +1340,6 @@ export class TextModel extends Disposable implements model.ITextModel {
);
}

if (this._tokens.hasLinesToTokenize(this._buffer)) {
this._beginBackgroundTokenization();
}

return result.reverseEdits;
}
@@ -1775,93 +1704,60 @@ export class TextModel extends Disposable implements model.ITextModel {
|
||||
|
||||
//#region Tokenization
|
||||
|
||||
public setLineTokens(lineNumber: number, tokens: Uint32Array): void {
|
||||
if (lineNumber < 1 || lineNumber > this.getLineCount()) {
|
||||
throw new Error('Illegal value for lineNumber');
|
||||
}
|
||||
|
||||
this._tokens.setTokens(this._languageIdentifier.id, lineNumber - 1, this._buffer.getLineLength(lineNumber), tokens);
|
||||
}
|
||||
|
||||
public setTokens(tokens: MultilineTokens[]): void {
|
||||
if (tokens.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
let ranges: { fromLineNumber: number; toLineNumber: number; }[] = [];
|
||||
|
||||
for (let i = 0, len = tokens.length; i < len; i++) {
|
||||
const element = tokens[i];
|
||||
ranges.push({ fromLineNumber: element.startLineNumber, toLineNumber: element.startLineNumber + element.tokens.length - 1 });
|
||||
for (let j = 0, lenJ = element.tokens.length; j < lenJ; j++) {
|
||||
this.setLineTokens(element.startLineNumber + j, element.tokens[j]);
|
||||
}
|
||||
}
|
||||
|
||||
this._emitModelTokensChangedEvent({
|
||||
tokenizationSupportChanged: false,
|
||||
ranges: ranges
|
||||
});
|
||||
}
|
||||
|
||||
public tokenizeViewport(startLineNumber: number, endLineNumber: number): void {
|
||||
if (!this._tokens.tokenizationSupport) {
|
||||
// nothing to do
|
||||
return;
|
||||
}
|
||||
|
||||
startLineNumber = Math.max(1, startLineNumber);
|
||||
endLineNumber = Math.min(this.getLineCount(), endLineNumber);
|
||||
endLineNumber = Math.min(this._buffer.getLineCount(), endLineNumber);
|
||||
this._tokenization.tokenizeViewport(startLineNumber, endLineNumber);
|
||||
}
|
||||
|
||||
if (endLineNumber <= this._tokens.inValidLineStartIndex) {
|
||||
// nothing to do
|
||||
return;
|
||||
}
|
||||
public clearTokens(): void {
|
||||
this._tokens.flush();
|
||||
this._emitModelTokensChangedEvent({
|
||||
tokenizationSupportChanged: true,
|
||||
ranges: [{
|
||||
fromLineNumber: 1,
|
||||
toLineNumber: this._buffer.getLineCount()
|
||||
}]
|
||||
});
|
||||
}
|
||||
|
||||
if (startLineNumber <= this._tokens.inValidLineStartIndex) {
|
||||
// tokenization has reached the viewport start...
|
||||
this.forceTokenization(endLineNumber);
|
||||
return;
|
||||
}
|
||||
|
||||
let nonWhitespaceColumn = this.getLineFirstNonWhitespaceColumn(startLineNumber);
|
||||
let fakeLines: string[] = [];
|
||||
let initialState: IState | null = null;
|
||||
for (let i = startLineNumber - 1; nonWhitespaceColumn > 0 && i >= 1; i--) {
|
||||
let newNonWhitespaceIndex = this.getLineFirstNonWhitespaceColumn(i);
|
||||
|
||||
if (newNonWhitespaceIndex === 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (newNonWhitespaceIndex < nonWhitespaceColumn) {
|
||||
initialState = this._tokens._getState(i - 1);
|
||||
if (initialState) {
|
||||
break;
|
||||
}
|
||||
fakeLines.push(this.getLineContent(i));
|
||||
nonWhitespaceColumn = newNonWhitespaceIndex;
|
||||
}
|
||||
}
|
||||
|
||||
if (!initialState) {
|
||||
initialState = this._tokens.tokenizationSupport.getInitialState();
|
||||
}
|
||||
|
||||
let state = initialState.clone();
|
||||
for (let i = fakeLines.length - 1; i >= 0; i--) {
|
||||
let r = this._tokens._tokenizeText(this._buffer, fakeLines[i], state);
|
||||
if (r) {
|
||||
state = r.endState.clone();
|
||||
} else {
|
||||
state = initialState.clone();
|
||||
}
|
||||
}
|
||||
|
||||
const eventBuilder = new ModelTokensChangedEventBuilder();
|
||||
for (let i = startLineNumber; i <= endLineNumber; i++) {
|
||||
let text = this.getLineContent(i);
|
||||
let r = this._tokens._tokenizeText(this._buffer, text, state);
|
||||
if (r) {
|
||||
this._tokens._setTokens(this._tokens.languageIdentifier.id, i - 1, text.length, r.tokens);
|
||||
|
||||
// We cannot trust these states/tokens to be valid!
|
||||
// (see https://github.com/Microsoft/vscode/issues/67607)
|
||||
this._tokens._setIsInvalid(i - 1, true);
|
||||
this._tokens._setState(i - 1, state);
|
||||
state = r.endState.clone();
|
||||
eventBuilder.registerChangedTokens(i);
|
||||
} else {
|
||||
state = initialState.clone();
|
||||
}
|
||||
}
|
||||
|
||||
const e = eventBuilder.build();
|
||||
if (e) {
|
||||
private _emitModelTokensChangedEvent(e: IModelTokensChangedEvent): void {
|
||||
if (!this._isDisposing) {
|
||||
this._onDidChangeTokens.fire(e);
|
||||
}
|
||||
}
|
||||
|
||||
public flushTokens(): void {
|
||||
this._resetTokenizationState();
|
||||
this.emitModelTokensChangedEvent({
|
||||
tokenizationSupportChanged: false,
|
||||
ranges: [{
|
||||
fromLineNumber: 1,
|
||||
toLineNumber: this.getLineCount()
|
||||
}]
|
||||
});
|
||||
public resetTokenization(): void {
|
||||
this._tokenization.reset();
|
||||
}
|
||||
|
||||
public forceTokenization(lineNumber: number): void {
|
||||
@@ -1869,30 +1765,11 @@ export class TextModel extends Disposable implements model.ITextModel {
|
||||
throw new Error('Illegal value for lineNumber');
|
||||
}
|
||||
|
||||
const eventBuilder = new ModelTokensChangedEventBuilder();
|
||||
|
||||
this._tokens._updateTokensUntilLine(this._buffer, eventBuilder, lineNumber);
|
||||
|
||||
const e = eventBuilder.build();
|
||||
if (e) {
|
||||
this._onDidChangeTokens.fire(e);
|
||||
}
|
||||
this._tokenization.forceTokenization(lineNumber);
|
||||
}
|
||||
|
||||
public isCheapToTokenize(lineNumber: number): boolean {
|
||||
if (!this._tokens.isCheapToTokenize(lineNumber)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (lineNumber < this._tokens.inValidLineStartIndex + 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (this.getLineLength(lineNumber) < CHEAP_TOKENIZATION_LENGTH_LIMIT) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
return this._tokenization.isCheapToTokenize(lineNumber);
|
||||
}
|
||||
|
||||
public tokenizeIfCheap(lineNumber: number): void {
|
||||
@@ -1910,7 +1787,7 @@ export class TextModel extends Disposable implements model.ITextModel {
|
||||
}
|
||||
|
||||
private _getLineTokens(lineNumber: number): LineTokens {
|
||||
const lineText = this._buffer.getLineContent(lineNumber);
|
||||
const lineText = this.getLineContent(lineNumber);
|
||||
return this._tokens.getTokens(this._languageIdentifier.id, lineNumber - 1, lineText);
|
||||
}
|
||||
|
||||
@@ -1935,81 +1812,14 @@ export class TextModel extends Disposable implements model.ITextModel {
|
||||
|
||||
this._languageIdentifier = languageIdentifier;
|
||||
|
||||
// Cancel tokenization, clear all tokens and begin tokenizing
|
||||
this._resetTokenizationState();
|
||||
|
||||
this.emitModelTokensChangedEvent({
|
||||
tokenizationSupportChanged: true,
|
||||
ranges: [{
|
||||
fromLineNumber: 1,
|
||||
toLineNumber: this.getLineCount()
|
||||
}]
|
||||
});
|
||||
this._onDidChangeLanguage.fire(e);
|
||||
this._onDidChangeLanguageConfiguration.fire({});
|
||||
}
|
||||
|
||||
public getLanguageIdAtPosition(_lineNumber: number, _column: number): LanguageId {
|
||||
if (!this._tokens.tokenizationSupport) {
|
||||
return this._languageIdentifier.id;
|
||||
}
|
||||
let { lineNumber, column } = this.validatePosition({ lineNumber: _lineNumber, column: _column });
|
||||
|
||||
let lineTokens = this._getLineTokens(lineNumber);
|
||||
return lineTokens.getLanguageId(lineTokens.findTokenIndexAtOffset(column - 1));
|
||||
}
|
||||
|
||||
private _beginBackgroundTokenization(): void {
|
||||
if (this._shouldAutoTokenize() && this._revalidateTokensTimeout === -1) {
|
||||
this._revalidateTokensTimeout = setTimeout(() => {
|
||||
this._revalidateTokensTimeout = -1;
|
||||
this._revalidateTokensNow();
|
||||
}, 0);
|
||||
}
|
||||
}
|
||||
|
||||
_warmUpTokens(): void {
|
||||
// Warm up first 100 lines (if it takes less than 50ms)
|
||||
const maxLineNumber = Math.min(100, this.getLineCount());
|
||||
this._revalidateTokensNow(maxLineNumber);
|
||||
|
||||
if (this._tokens.hasLinesToTokenize(this._buffer)) {
|
||||
this._beginBackgroundTokenization();
|
||||
}
|
||||
}
|
||||
|
||||
private _revalidateTokensNow(toLineNumber: number = this._buffer.getLineCount()): void {
|
||||
const MAX_ALLOWED_TIME = 20;
|
||||
const eventBuilder = new ModelTokensChangedEventBuilder();
|
||||
const sw = StopWatch.create(false);
|
||||
|
||||
while (this._tokens.hasLinesToTokenize(this._buffer)) {
|
||||
if (sw.elapsed() > MAX_ALLOWED_TIME) {
|
||||
// Stop if MAX_ALLOWED_TIME is reached
|
||||
break;
|
||||
}
|
||||
|
||||
const tokenizedLineNumber = this._tokens._tokenizeOneLine(this._buffer, eventBuilder);
|
||||
|
||||
if (tokenizedLineNumber >= toLineNumber) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (this._tokens.hasLinesToTokenize(this._buffer)) {
|
||||
this._beginBackgroundTokenization();
|
||||
}
|
||||
|
||||
const e = eventBuilder.build();
|
||||
if (e) {
|
||||
this._onDidChangeTokens.fire(e);
|
||||
}
|
||||
}
|
||||
|
||||
private emitModelTokensChangedEvent(e: IModelTokensChangedEvent): void {
|
||||
if (!this._isDisposing) {
|
||||
this._onDidChangeTokens.fire(e);
|
||||
}
|
||||
public getLanguageIdAtPosition(lineNumber: number, column: number): LanguageId {
|
||||
const position = this.validatePosition(new Position(lineNumber, column));
|
||||
const lineTokens = this.getLineTokens(position.lineNumber);
|
||||
return lineTokens.getLanguageId(lineTokens.findTokenIndexAtOffset(position.column - 1));
|
||||
}
|
||||
|
||||
// Having tokens allows implementing additional helper methods
|
||||
@@ -2823,17 +2633,25 @@ function cleanClassName(className: string): string {
|
||||
return className.replace(/[^a-z0-9\-_]/gi, ' ');
|
||||
}
|
||||
|
||||
export class ModelDecorationOverviewRulerOptions implements model.IModelDecorationOverviewRulerOptions {
|
||||
class DecorationOptions implements model.IDecorationOptions {
|
||||
readonly color: string | ThemeColor;
|
||||
readonly darkColor: string | ThemeColor;
|
||||
|
||||
constructor(options: model.IDecorationOptions) {
|
||||
this.color = options.color || strings.empty;
|
||||
this.darkColor = options.darkColor || strings.empty;
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
export class ModelDecorationOverviewRulerOptions extends DecorationOptions {
|
||||
readonly position: model.OverviewRulerLane;
|
||||
private _resolvedColor: string | null;
|
||||
|
||||
constructor(options: model.IModelDecorationOverviewRulerOptions) {
|
||||
this.color = options.color || strings.empty;
|
||||
this.darkColor = options.darkColor || strings.empty;
|
||||
this.position = (typeof options.position === 'number' ? options.position : model.OverviewRulerLane.Center);
|
||||
super(options);
|
||||
this._resolvedColor = null;
|
||||
this.position = (typeof options.position === 'number' ? options.position : model.OverviewRulerLane.Center);
|
||||
}
|
||||
|
||||
public getColor(theme: ITheme): string {
|
||||
@@ -2863,6 +2681,36 @@ export class ModelDecorationOverviewRulerOptions implements model.IModelDecorati
|
||||
}
|
||||
}
|
||||
|
||||
export class ModelDecorationMinimapOptions extends DecorationOptions {
|
||||
readonly position: model.MinimapPosition;
|
||||
private _resolvedColor: Color | undefined;
|
||||
|
||||
|
||||
constructor(options: model.IModelDecorationMinimapOptions) {
|
||||
super(options);
|
||||
this.position = options.position;
|
||||
}
|
||||
|
||||
public getColor(theme: ITheme): Color | undefined {
|
||||
if (!this._resolvedColor) {
|
||||
if (theme.type !== 'light' && this.darkColor) {
|
||||
this._resolvedColor = this._resolveColor(this.darkColor, theme);
|
||||
} else {
|
||||
this._resolvedColor = this._resolveColor(this.color, theme);
|
||||
}
|
||||
}
|
||||
|
||||
return this._resolvedColor;
|
||||
}
|
||||
|
||||
private _resolveColor(color: string | ThemeColor, theme: ITheme): Color | undefined {
|
||||
if (typeof color === 'string') {
|
||||
return Color.fromHex(color);
|
||||
}
|
||||
return theme.getColor(color.id);
|
||||
}
|
||||
}
|
||||
|
||||
export class ModelDecorationOptions implements model.IModelDecorationOptions {
|
||||
|
||||
public static EMPTY: ModelDecorationOptions;
|
||||
@@ -2884,6 +2732,7 @@ export class ModelDecorationOptions implements model.IModelDecorationOptions {
|
||||
readonly showIfCollapsed: boolean;
|
||||
readonly collapseOnReplaceEdit: boolean;
|
||||
readonly overviewRuler: ModelDecorationOverviewRulerOptions | null;
|
||||
readonly minimap: ModelDecorationMinimapOptions | null;
|
||||
readonly glyphMarginClassName: string | null;
|
||||
readonly linesDecorationsClassName: string | null;
|
||||
readonly marginClassName: string | null;
|
||||
@@ -2902,6 +2751,7 @@ export class ModelDecorationOptions implements model.IModelDecorationOptions {
|
||||
this.showIfCollapsed = options.showIfCollapsed || false;
|
||||
this.collapseOnReplaceEdit = options.collapseOnReplaceEdit || false;
|
||||
this.overviewRuler = options.overviewRuler ? new ModelDecorationOverviewRulerOptions(options.overviewRuler) : null;
|
||||
this.minimap = options.minimap ? new ModelDecorationMinimapOptions(options.minimap) : null;
|
||||
this.glyphMarginClassName = options.glyphMarginClassName ? cleanClassName(options.glyphMarginClassName) : null;
|
||||
this.linesDecorationsClassName = options.linesDecorationsClassName ? cleanClassName(options.linesDecorationsClassName) : null;
|
||||
this.marginClassName = options.marginClassName ? cleanClassName(options.marginClassName) : null;
|
||||
|
||||
@@ -7,501 +7,486 @@ import * as arrays from 'vs/base/common/arrays';
|
||||
import { onUnexpectedError } from 'vs/base/common/errors';
|
||||
import { LineTokens } from 'vs/editor/common/core/lineTokens';
|
||||
import { Position } from 'vs/editor/common/core/position';
|
||||
import { Range } from 'vs/editor/common/core/range';
|
||||
import { IRange } from 'vs/editor/common/core/range';
|
||||
import { TokenizationResult2 } from 'vs/editor/common/core/token';
|
||||
import { ITextBuffer } from 'vs/editor/common/model';
|
||||
import { IModelTokensChangedEvent } from 'vs/editor/common/model/textModelEvents';
|
||||
import { ColorId, FontStyle, IState, ITokenizationSupport, LanguageId, LanguageIdentifier, MetadataConsts, StandardTokenType, TokenMetadata } from 'vs/editor/common/modes';
|
||||
import { RawContentChangedType } from 'vs/editor/common/model/textModelEvents';
|
||||
import { IState, ITokenizationSupport, LanguageIdentifier, TokenizationRegistry } from 'vs/editor/common/modes';
|
||||
import { nullTokenize2 } from 'vs/editor/common/modes/nullMode';
|
||||
import { TextModel } from 'vs/editor/common/model/textModel';
|
||||
import { Disposable } from 'vs/base/common/lifecycle';
|
||||
import { StopWatch } from 'vs/base/common/stopwatch';
|
||||
import { CharCode } from 'vs/base/common/charCode';
|
||||
import { MultilineTokensBuilder } from 'vs/editor/common/model/tokensStore';
|
||||
|
||||
function getDefaultMetadata(topLevelLanguageId: LanguageId): number {
|
||||
return (
|
||||
(topLevelLanguageId << MetadataConsts.LANGUAGEID_OFFSET)
|
||||
| (StandardTokenType.Other << MetadataConsts.TOKEN_TYPE_OFFSET)
|
||||
| (FontStyle.None << MetadataConsts.FONT_STYLE_OFFSET)
|
||||
| (ColorId.DefaultForeground << MetadataConsts.FOREGROUND_OFFSET)
|
||||
| (ColorId.DefaultBackground << MetadataConsts.BACKGROUND_OFFSET)
|
||||
) >>> 0;
|
||||
export function countEOL(text: string): [number, number] {
|
||||
let eolCount = 0;
|
||||
let firstLineLength = 0;
|
||||
for (let i = 0, len = text.length; i < len; i++) {
|
||||
const chr = text.charCodeAt(i);
|
||||
|
||||
if (chr === CharCode.CarriageReturn) {
|
||||
if (eolCount === 0) {
|
||||
firstLineLength = i;
|
||||
}
|
||||
eolCount++;
|
||||
if (i + 1 < len && text.charCodeAt(i + 1) === CharCode.LineFeed) {
|
||||
// \r\n... case
|
||||
i++; // skip \n
|
||||
} else {
|
||||
// \r... case
|
||||
}
|
||||
} else if (chr === CharCode.LineFeed) {
|
||||
if (eolCount === 0) {
|
||||
firstLineLength = i;
|
||||
}
|
||||
eolCount++;
|
||||
}
|
||||
}
|
||||
if (eolCount === 0) {
|
||||
firstLineLength = text.length;
|
||||
}
|
||||
return [eolCount, firstLineLength];
|
||||
}
|
||||
|
||||
const EMPTY_LINE_TOKENS = (new Uint32Array(0)).buffer;
|
||||
|
||||
class ModelLineTokens {
|
||||
_state: IState | null;
|
||||
_lineTokens: ArrayBuffer | null;
|
||||
_invalid: boolean;
|
||||
|
||||
constructor(state: IState | null) {
|
||||
this._state = state;
|
||||
this._lineTokens = null;
|
||||
this._invalid = true;
|
||||
}
|
||||
|
||||
public deleteBeginning(toChIndex: number): void {
|
||||
if (this._lineTokens === null || this._lineTokens === EMPTY_LINE_TOKENS) {
|
||||
return;
|
||||
}
|
||||
this.delete(0, toChIndex);
|
||||
}
|
||||
|
||||
public deleteEnding(fromChIndex: number): void {
|
||||
if (this._lineTokens === null || this._lineTokens === EMPTY_LINE_TOKENS) {
|
||||
return;
|
||||
}
|
||||
|
||||
const tokens = new Uint32Array(this._lineTokens);
|
||||
const lineTextLength = tokens[tokens.length - 2];
|
||||
this.delete(fromChIndex, lineTextLength);
|
||||
}
|
||||
|
||||
public delete(fromChIndex: number, toChIndex: number): void {
|
||||
if (this._lineTokens === null || this._lineTokens === EMPTY_LINE_TOKENS || fromChIndex === toChIndex) {
|
||||
return;
|
||||
}
|
||||
|
||||
const tokens = new Uint32Array(this._lineTokens);
|
||||
const tokensCount = (tokens.length >>> 1);
|
||||
|
||||
// special case: deleting everything
|
||||
if (fromChIndex === 0 && tokens[tokens.length - 2] === toChIndex) {
|
||||
this._lineTokens = EMPTY_LINE_TOKENS;
|
||||
return;
|
||||
}
|
||||
|
||||
const fromTokenIndex = LineTokens.findIndexInTokensArray(tokens, fromChIndex);
|
||||
const fromTokenStartOffset = (fromTokenIndex > 0 ? tokens[(fromTokenIndex - 1) << 1] : 0);
|
||||
const fromTokenEndOffset = tokens[fromTokenIndex << 1];
|
||||
|
||||
if (toChIndex < fromTokenEndOffset) {
|
||||
// the delete range is inside a single token
|
||||
const delta = (toChIndex - fromChIndex);
|
||||
for (let i = fromTokenIndex; i < tokensCount; i++) {
|
||||
tokens[i << 1] -= delta;
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let dest: number;
|
||||
let lastEnd: number;
|
||||
if (fromTokenStartOffset !== fromChIndex) {
|
||||
tokens[fromTokenIndex << 1] = fromChIndex;
|
||||
dest = ((fromTokenIndex + 1) << 1);
|
||||
lastEnd = fromChIndex;
|
||||
} else {
|
||||
dest = (fromTokenIndex << 1);
|
||||
lastEnd = fromTokenStartOffset;
|
||||
}
|
||||
|
||||
const delta = (toChIndex - fromChIndex);
|
||||
for (let tokenIndex = fromTokenIndex + 1; tokenIndex < tokensCount; tokenIndex++) {
|
||||
const tokenEndOffset = tokens[tokenIndex << 1] - delta;
|
||||
if (tokenEndOffset > lastEnd) {
|
||||
tokens[dest++] = tokenEndOffset;
|
||||
tokens[dest++] = tokens[(tokenIndex << 1) + 1];
|
||||
lastEnd = tokenEndOffset;
|
||||
}
|
||||
}
|
||||
|
||||
if (dest === tokens.length) {
|
||||
// nothing to trim
|
||||
return;
|
||||
}
|
||||
|
||||
let tmp = new Uint32Array(dest);
|
||||
tmp.set(tokens.subarray(0, dest), 0);
|
||||
this._lineTokens = tmp.buffer;
|
||||
}
|
||||
|
||||
public append(_otherTokens: ArrayBuffer | null): void {
|
||||
if (_otherTokens === EMPTY_LINE_TOKENS) {
|
||||
return;
|
||||
}
|
||||
if (this._lineTokens === EMPTY_LINE_TOKENS) {
|
||||
this._lineTokens = _otherTokens;
|
||||
return;
|
||||
}
|
||||
if (this._lineTokens === null) {
|
||||
return;
|
||||
}
|
||||
if (_otherTokens === null) {
|
||||
// cannot determine combined line length...
|
||||
this._lineTokens = null;
|
||||
return;
|
||||
}
|
||||
const myTokens = new Uint32Array(this._lineTokens);
|
||||
const otherTokens = new Uint32Array(_otherTokens);
|
||||
const otherTokensCount = (otherTokens.length >>> 1);
|
||||
|
||||
let result = new Uint32Array(myTokens.length + otherTokens.length);
|
||||
result.set(myTokens, 0);
|
||||
let dest = myTokens.length;
|
||||
const delta = myTokens[myTokens.length - 2];
|
||||
for (let i = 0; i < otherTokensCount; i++) {
|
||||
result[dest++] = otherTokens[(i << 1)] + delta;
|
||||
result[dest++] = otherTokens[(i << 1) + 1];
|
||||
}
|
||||
this._lineTokens = result.buffer;
|
||||
}
|
||||
|
||||
public insert(chIndex: number, textLength: number): void {
|
||||
if (!this._lineTokens) {
|
||||
// nothing to do
|
||||
return;
|
||||
}
|
||||
|
||||
const tokens = new Uint32Array(this._lineTokens);
|
||||
const tokensCount = (tokens.length >>> 1);
|
||||
|
||||
let fromTokenIndex = LineTokens.findIndexInTokensArray(tokens, chIndex);
|
||||
if (fromTokenIndex > 0) {
|
||||
const fromTokenStartOffset = tokens[(fromTokenIndex - 1) << 1];
|
||||
if (fromTokenStartOffset === chIndex) {
|
||||
fromTokenIndex--;
|
||||
}
|
||||
}
|
||||
for (let tokenIndex = fromTokenIndex; tokenIndex < tokensCount; tokenIndex++) {
|
||||
tokens[tokenIndex << 1] += textLength;
|
||||
}
|
||||
}
|
||||
const enum Constants {
|
||||
CHEAP_TOKENIZATION_LENGTH_LIMIT = 2048
|
||||
}
|
||||
|
||||
export class ModelLinesTokens {
|
||||
|
||||
public readonly languageIdentifier: LanguageIdentifier;
|
||||
public readonly tokenizationSupport: ITokenizationSupport | null;
|
||||
private _tokens: ModelLineTokens[];
|
||||
export class TokenizationStateStore {
|
||||
private _beginState: (IState | null)[];
|
||||
private _valid: boolean[];
|
||||
private _len: number;
|
||||
private _invalidLineStartIndex: number;
|
||||
private _lastState: IState | null;
|
||||
|
||||
constructor(languageIdentifier: LanguageIdentifier, tokenizationSupport: ITokenizationSupport | null) {
|
||||
this.languageIdentifier = languageIdentifier;
|
||||
this.tokenizationSupport = tokenizationSupport;
|
||||
this._tokens = [];
|
||||
if (this.tokenizationSupport) {
|
||||
let initialState: IState | null = null;
|
||||
try {
|
||||
initialState = this.tokenizationSupport.getInitialState();
|
||||
} catch (e) {
|
||||
onUnexpectedError(e);
|
||||
this.tokenizationSupport = null;
|
||||
}
|
||||
|
||||
if (initialState) {
|
||||
this._tokens[0] = new ModelLineTokens(initialState);
|
||||
}
|
||||
}
|
||||
|
||||
this._invalidLineStartIndex = 0;
|
||||
this._lastState = null;
|
||||
constructor() {
|
||||
this._reset(null);
|
||||
}
|
||||
|
||||
public get inValidLineStartIndex() {
|
||||
private _reset(initialState: IState | null): void {
|
||||
this._beginState = [];
|
||||
this._valid = [];
|
||||
this._len = 0;
|
||||
this._invalidLineStartIndex = 0;
|
||||
|
||||
if (initialState) {
|
||||
this._setBeginState(0, initialState);
|
||||
}
|
||||
}
|
||||
|
||||
public flush(initialState: IState | null): void {
|
||||
this._reset(initialState);
|
||||
}
|
||||
|
||||
public get invalidLineStartIndex() {
|
||||
return this._invalidLineStartIndex;
|
||||
}
|
||||
|
||||
public getTokens(topLevelLanguageId: LanguageId, lineIndex: number, lineText: string): LineTokens {
|
||||
let rawLineTokens: ArrayBuffer | null = null;
|
||||
if (lineIndex < this._tokens.length && this._tokens[lineIndex]) {
|
||||
rawLineTokens = this._tokens[lineIndex]._lineTokens;
|
||||
private _invalidateLine(lineIndex: number): void {
|
||||
if (lineIndex < this._len) {
|
||||
this._valid[lineIndex] = false;
|
||||
}
|
||||
|
||||
if (rawLineTokens !== null && rawLineTokens !== EMPTY_LINE_TOKENS) {
|
||||
return new LineTokens(new Uint32Array(rawLineTokens), lineText);
|
||||
}
|
||||
|
||||
let lineTokens = new Uint32Array(2);
|
||||
lineTokens[0] = lineText.length;
|
||||
lineTokens[1] = getDefaultMetadata(topLevelLanguageId);
|
||||
return new LineTokens(lineTokens, lineText);
|
||||
}
|
||||
|
||||
public isCheapToTokenize(lineNumber: number): boolean {
|
||||
const firstInvalidLineNumber = this._invalidLineStartIndex + 1;
|
||||
return (firstInvalidLineNumber >= lineNumber);
|
||||
}
|
||||
|
||||
public hasLinesToTokenize(buffer: ITextBuffer): boolean {
|
||||
return (this._invalidLineStartIndex < buffer.getLineCount());
|
||||
}
|
||||
|
||||
public invalidateLine(lineIndex: number): void {
|
||||
this._setIsInvalid(lineIndex, true);
|
||||
if (lineIndex < this._invalidLineStartIndex) {
|
||||
this._setIsInvalid(this._invalidLineStartIndex, true);
|
||||
this._invalidLineStartIndex = lineIndex;
|
||||
}
|
||||
}
|
||||
|
||||
_setIsInvalid(lineIndex: number, invalid: boolean): void {
|
||||
if (lineIndex < this._tokens.length && this._tokens[lineIndex]) {
|
||||
this._tokens[lineIndex]._invalid = invalid;
|
||||
private _isValid(lineIndex: number): boolean {
|
||||
if (lineIndex < this._len) {
|
||||
return this._valid[lineIndex];
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
_isInvalid(lineIndex: number): boolean {
|
||||
if (lineIndex < this._tokens.length && this._tokens[lineIndex]) {
|
||||
return this._tokens[lineIndex]._invalid;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
_getState(lineIndex: number): IState | null {
|
||||
if (lineIndex < this._tokens.length && this._tokens[lineIndex]) {
|
||||
return this._tokens[lineIndex]._state;
|
||||
public getBeginState(lineIndex: number): IState | null {
|
||||
if (lineIndex < this._len) {
|
||||
return this._beginState[lineIndex];
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
_setTokens(topLevelLanguageId: LanguageId, lineIndex: number, lineTextLength: number, tokens: Uint32Array): void {
|
||||
let target: ModelLineTokens;
|
||||
if (lineIndex < this._tokens.length && this._tokens[lineIndex]) {
|
||||
target = this._tokens[lineIndex];
|
||||
} else {
|
||||
target = new ModelLineTokens(null);
|
||||
this._tokens[lineIndex] = target;
|
||||
private _ensureLine(lineIndex: number): void {
|
||||
while (lineIndex >= this._len) {
|
||||
this._beginState[this._len] = null;
|
||||
this._valid[this._len] = false;
|
||||
this._len++;
|
||||
}
|
||||
|
||||
if (lineTextLength === 0) {
|
||||
let hasDifferentLanguageId = false;
|
||||
if (tokens && tokens.length > 1) {
|
||||
hasDifferentLanguageId = (TokenMetadata.getLanguageId(tokens[1]) !== topLevelLanguageId);
|
||||
}
|
||||
|
||||
if (!hasDifferentLanguageId) {
|
||||
target._lineTokens = EMPTY_LINE_TOKENS;
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if (!tokens || tokens.length === 0) {
|
||||
tokens = new Uint32Array(2);
|
||||
tokens[0] = 0;
|
||||
tokens[1] = getDefaultMetadata(topLevelLanguageId);
|
||||
}
|
||||
|
||||
LineTokens.convertToEndOffset(tokens, lineTextLength);
|
||||
|
||||
target._lineTokens = tokens.buffer;
|
||||
}
|
||||
|
||||
_setState(lineIndex: number, state: IState): void {
|
||||
if (lineIndex < this._tokens.length && this._tokens[lineIndex]) {
|
||||
this._tokens[lineIndex]._state = state;
|
||||
} else {
|
||||
const tmp = new ModelLineTokens(state);
|
||||
this._tokens[lineIndex] = tmp;
|
||||
private _deleteLines(start: number, deleteCount: number): void {
|
||||
if (deleteCount === 0) {
|
||||
return;
|
||||
}
|
||||
this._beginState.splice(start, deleteCount);
|
||||
this._valid.splice(start, deleteCount);
|
||||
this._len -= deleteCount;
|
||||
}
|
||||
|
||||
private _insertLines(insertIndex: number, insertCount: number): void {
|
||||
if (insertCount === 0) {
|
||||
return;
|
||||
}
|
||||
let beginState: (IState | null)[] = [];
|
||||
let valid: boolean[] = [];
|
||||
for (let i = 0; i < insertCount; i++) {
|
||||
beginState[i] = null;
|
||||
valid[i] = false;
|
||||
}
|
||||
this._beginState = arrays.arrayInsert(this._beginState, insertIndex, beginState);
|
||||
this._valid = arrays.arrayInsert(this._valid, insertIndex, valid);
|
||||
this._len += insertCount;
|
||||
}
|
||||
|
||||
private _setValid(lineIndex: number, valid: boolean): void {
|
||||
this._ensureLine(lineIndex);
|
||||
this._valid[lineIndex] = valid;
|
||||
}
|
||||
|
||||
private _setBeginState(lineIndex: number, beginState: IState | null): void {
|
||||
this._ensureLine(lineIndex);
|
||||
this._beginState[lineIndex] = beginState;
|
||||
}
|
||||
|
||||
public setEndState(linesLength: number, lineIndex: number, endState: IState): void {
|
||||
this._setValid(lineIndex, true);
|
||||
this._invalidLineStartIndex = lineIndex + 1;
|
||||
|
||||
// Check if this was the last line
|
||||
if (lineIndex === linesLength - 1) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if the end state has changed
|
||||
const previousEndState = this.getBeginState(lineIndex + 1);
|
||||
if (previousEndState === null || !endState.equals(previousEndState)) {
|
||||
this._setBeginState(lineIndex + 1, endState);
|
||||
this._invalidateLine(lineIndex + 1);
|
||||
return;
|
||||
}
|
||||
|
||||
// Perhaps we can skip tokenizing some lines...
|
||||
let i = lineIndex + 1;
|
||||
while (i < linesLength) {
|
||||
if (!this._isValid(i)) {
|
||||
break;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
this._invalidLineStartIndex = i;
|
||||
}
|
||||
|
||||
public setFakeTokens(lineIndex: number): void {
|
||||
this._setValid(lineIndex, false);
|
||||
}
|
||||
|
||||
//#region Editing
|
||||
|
||||
public applyEdits(range: Range, eolCount: number, firstLineLength: number): void {
|
||||
|
||||
public applyEdits(range: IRange, eolCount: number): void {
|
||||
const deletingLinesCnt = range.endLineNumber - range.startLineNumber;
|
||||
const insertingLinesCnt = eolCount;
|
||||
const editingLinesCnt = Math.min(deletingLinesCnt, insertingLinesCnt);
|
||||
|
||||
for (let j = editingLinesCnt; j >= 0; j--) {
|
||||
this.invalidateLine(range.startLineNumber + j - 1);
|
||||
this._invalidateLine(range.startLineNumber + j - 1);
|
||||
}
|
||||
|
||||
this._acceptDeleteRange(range);
|
||||
this._acceptInsertText(new Position(range.startLineNumber, range.startColumn), eolCount, firstLineLength);
|
||||
this._acceptInsertText(new Position(range.startLineNumber, range.startColumn), eolCount);
|
||||
}
|
||||
|
||||
private _acceptDeleteRange(range: Range): void {
|
||||
private _acceptDeleteRange(range: IRange): void {
|
||||
|
||||
const firstLineIndex = range.startLineNumber - 1;
|
||||
if (firstLineIndex >= this._tokens.length) {
|
||||
if (firstLineIndex >= this._len) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (range.startLineNumber === range.endLineNumber) {
|
||||
if (range.startColumn === range.endColumn) {
|
||||
// Nothing to delete
|
||||
return;
|
||||
}
|
||||
|
||||
this._tokens[firstLineIndex].delete(range.startColumn - 1, range.endColumn - 1);
|
||||
return;
|
||||
}
|
||||
|
||||
const firstLine = this._tokens[firstLineIndex];
|
||||
firstLine.deleteEnding(range.startColumn - 1);
|
||||
|
||||
const lastLineIndex = range.endLineNumber - 1;
|
||||
let lastLineTokens: ArrayBuffer | null = null;
|
||||
if (lastLineIndex < this._tokens.length) {
|
||||
const lastLine = this._tokens[lastLineIndex];
|
||||
lastLine.deleteBeginning(range.endColumn - 1);
|
||||
lastLineTokens = lastLine._lineTokens;
|
||||
}
|
||||
|
||||
// Take remaining text on last line and append it to remaining text on first line
|
||||
firstLine.append(lastLineTokens);
|
||||
|
||||
// Delete middle lines
|
||||
this._tokens.splice(range.startLineNumber, range.endLineNumber - range.startLineNumber);
|
||||
this._deleteLines(range.startLineNumber, range.endLineNumber - range.startLineNumber);
|
||||
}
|
||||
|
||||
private _acceptInsertText(position: Position, eolCount: number, firstLineLength: number): void {
|
||||
|
||||
if (eolCount === 0 && firstLineLength === 0) {
|
||||
// Nothing to insert
|
||||
return;
|
||||
}
|
||||
private _acceptInsertText(position: Position, eolCount: number): void {
|
||||
|
||||
const lineIndex = position.lineNumber - 1;
|
||||
if (lineIndex >= this._tokens.length) {
|
||||
if (lineIndex >= this._len) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (eolCount === 0) {
|
||||
// Inserting text on one line
|
||||
this._tokens[lineIndex].insert(position.column - 1, firstLineLength);
|
||||
return;
|
||||
}
|
||||
|
||||
const line = this._tokens[lineIndex];
|
||||
line.deleteEnding(position.column - 1);
|
||||
line.insert(position.column - 1, firstLineLength);
|
||||
|
||||
let insert: ModelLineTokens[] = new Array<ModelLineTokens>(eolCount);
|
||||
for (let i = eolCount - 1; i >= 0; i--) {
|
||||
insert[i] = new ModelLineTokens(null);
|
||||
}
|
||||
this._tokens = arrays.arrayInsert(this._tokens, position.lineNumber, insert);
|
||||
this._insertLines(position.lineNumber, eolCount);
|
||||
}
|
||||
|
||||
//#endregion
|
||||
}
|
||||
|
||||
//#region Tokenization
|
||||
export class TextModelTokenization extends Disposable {
|
||||
|
||||
public _tokenizeOneLine(buffer: ITextBuffer, eventBuilder: ModelTokensChangedEventBuilder): number {
|
||||
if (!this.hasLinesToTokenize(buffer)) {
|
||||
return buffer.getLineCount() + 1;
|
||||
private readonly _textModel: TextModel;
|
||||
private readonly _tokenizationStateStore: TokenizationStateStore;
|
||||
private _revalidateTokensTimeout: any;
|
||||
private _tokenizationSupport: ITokenizationSupport | null;
|
||||
|
||||
constructor(textModel: TextModel) {
|
||||
super();
|
||||
this._textModel = textModel;
|
||||
this._tokenizationStateStore = new TokenizationStateStore();
|
||||
this._revalidateTokensTimeout = -1;
|
||||
this._tokenizationSupport = null;
|
||||
|
||||
this._register(TokenizationRegistry.onDidChange((e) => {
|
||||
const languageIdentifier = this._textModel.getLanguageIdentifier();
|
||||
if (e.changedLanguages.indexOf(languageIdentifier.language) === -1) {
|
||||
return;
|
||||
}
|
||||
|
||||
this._resetTokenizationState();
|
||||
this._textModel.clearTokens();
|
||||
}));
|
||||
|
||||
this._register(this._textModel.onDidChangeRawContentFast((e) => {
|
||||
if (e.containsEvent(RawContentChangedType.Flush)) {
|
||||
this._resetTokenizationState();
|
||||
return;
|
||||
}
|
||||
}));
|
||||
|
||||
this._register(this._textModel.onDidChangeContentFast((e) => {
|
||||
for (let i = 0, len = e.changes.length; i < len; i++) {
|
||||
const change = e.changes[i];
|
||||
const [eolCount] = countEOL(change.text);
|
||||
this._tokenizationStateStore.applyEdits(change.range, eolCount);
|
||||
}
|
||||
|
||||
this._beginBackgroundTokenization();
|
||||
}));
|
||||
|
||||
this._register(this._textModel.onDidChangeAttached(() => {
|
||||
this._beginBackgroundTokenization();
|
||||
}));
|
||||
|
||||
this._register(this._textModel.onDidChangeLanguage(() => {
|
||||
this._resetTokenizationState();
|
||||
this._textModel.clearTokens();
|
||||
}));
|
||||
|
||||
this._resetTokenizationState();
|
||||
}
|
||||
|
||||
public dispose(): void {
|
||||
this._clearTimers();
|
||||
super.dispose();
|
||||
}
|
||||
|
||||
private _clearTimers(): void {
|
||||
if (this._revalidateTokensTimeout !== -1) {
|
||||
clearTimeout(this._revalidateTokensTimeout);
|
||||
this._revalidateTokensTimeout = -1;
|
||||
}
|
||||
const lineNumber = this._invalidLineStartIndex + 1;
|
||||
this._updateTokensUntilLine(buffer, eventBuilder, lineNumber);
|
||||
}
|
||||
|
||||
private _resetTokenizationState(): void {
|
||||
this._clearTimers();
|
||||
const [tokenizationSupport, initialState] = initializeTokenization(this._textModel);
|
||||
this._tokenizationSupport = tokenizationSupport;
|
||||
this._tokenizationStateStore.flush(initialState);
|
||||
this._beginBackgroundTokenization();
|
||||
}
|
||||
|
||||
private _beginBackgroundTokenization(): void {
|
||||
if (this._textModel.isAttachedToEditor() && this._hasLinesToTokenize() && this._revalidateTokensTimeout === -1) {
|
||||
this._revalidateTokensTimeout = setTimeout(() => {
|
||||
this._revalidateTokensTimeout = -1;
|
||||
this._revalidateTokensNow();
|
||||
}, 0);
|
||||
}
|
||||
}
|
||||
|
||||
private _revalidateTokensNow(toLineNumber: number = this._textModel.getLineCount()): void {
|
||||
const MAX_ALLOWED_TIME = 20;
|
||||
const builder = new MultilineTokensBuilder();
|
||||
const sw = StopWatch.create(false);
|
||||
|
||||
while (this._hasLinesToTokenize()) {
|
||||
if (sw.elapsed() > MAX_ALLOWED_TIME) {
|
||||
// Stop if MAX_ALLOWED_TIME is reached
|
||||
break;
|
||||
}
|
||||
|
||||
const tokenizedLineNumber = this._tokenizeOneInvalidLine(builder);
|
||||
|
||||
if (tokenizedLineNumber >= toLineNumber) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
this._beginBackgroundTokenization();
|
||||
this._textModel.setTokens(builder.tokens);
|
||||
}
|
||||
|
||||
public tokenizeViewport(startLineNumber: number, endLineNumber: number): void {
|
||||
const builder = new MultilineTokensBuilder();
|
||||
this._tokenizeViewport(builder, startLineNumber, endLineNumber);
|
||||
this._textModel.setTokens(builder.tokens);
|
||||
}
|
||||
|
||||
public reset(): void {
|
||||
this._resetTokenizationState();
|
||||
this._textModel.clearTokens();
|
||||
}
|
||||
|
||||
public forceTokenization(lineNumber: number): void {
|
||||
const builder = new MultilineTokensBuilder();
|
||||
this._updateTokensUntilLine(builder, lineNumber);
|
||||
this._textModel.setTokens(builder.tokens);
|
||||
}
|
||||
|
||||
public isCheapToTokenize(lineNumber: number): boolean {
|
||||
if (!this._tokenizationSupport) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const firstInvalidLineNumber = this._tokenizationStateStore.invalidLineStartIndex + 1;
|
||||
if (lineNumber > firstInvalidLineNumber) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (lineNumber < firstInvalidLineNumber) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (this._textModel.getLineLength(lineNumber) < Constants.CHEAP_TOKENIZATION_LENGTH_LIMIT) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private _hasLinesToTokenize(): boolean {
|
||||
if (!this._tokenizationSupport) {
|
||||
return false;
|
||||
}
|
||||
return (this._tokenizationStateStore.invalidLineStartIndex < this._textModel.getLineCount());
|
||||
}
|
||||
|
||||
private _tokenizeOneInvalidLine(builder: MultilineTokensBuilder): number {
|
||||
if (!this._hasLinesToTokenize()) {
|
||||
return this._textModel.getLineCount() + 1;
|
||||
}
|
||||
const lineNumber = this._tokenizationStateStore.invalidLineStartIndex + 1;
|
||||
this._updateTokensUntilLine(builder, lineNumber);
|
||||
return lineNumber;
|
||||
}
|
||||
|
||||
public _tokenizeText(buffer: ITextBuffer, text: string, state: IState): TokenizationResult2 {
|
||||
let r: TokenizationResult2 | null = null;
|
||||
|
||||
if (this.tokenizationSupport) {
|
||||
try {
|
||||
r = this.tokenizationSupport.tokenize2(text, state, 0);
|
||||
} catch (e) {
|
||||
onUnexpectedError(e);
|
||||
}
|
||||
}
|
||||
|
||||
if (!r) {
|
||||
r = nullTokenize2(this.languageIdentifier.id, text, state, 0);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
public _updateTokensUntilLine(buffer: ITextBuffer, eventBuilder: ModelTokensChangedEventBuilder, lineNumber: number): void {
if (!this.tokenizationSupport) {
this._invalidLineStartIndex = buffer.getLineCount();
private _updateTokensUntilLine(builder: MultilineTokensBuilder, lineNumber: number): void {
if (!this._tokenizationSupport) {
return;
}

const linesLength = buffer.getLineCount();
const languageIdentifier = this._textModel.getLanguageIdentifier();
const linesLength = this._textModel.getLineCount();
const endLineIndex = lineNumber - 1;

// Validate all states up to and including endLineIndex
for (let lineIndex = this._invalidLineStartIndex; lineIndex <= endLineIndex; lineIndex++) {
const endStateIndex = lineIndex + 1;
const text = buffer.getLineContent(lineIndex + 1);
const lineStartState = this._getState(lineIndex);
for (let lineIndex = this._tokenizationStateStore.invalidLineStartIndex; lineIndex <= endLineIndex; lineIndex++) {
const text = this._textModel.getLineContent(lineIndex + 1);
const lineStartState = this._tokenizationStateStore.getBeginState(lineIndex);

let r: TokenizationResult2 | null = null;
const r = safeTokenize(languageIdentifier, this._tokenizationSupport, text, lineStartState!);
builder.add(lineIndex + 1, r.tokens);
this._tokenizationStateStore.setEndState(linesLength, lineIndex, r.endState);
lineIndex = this._tokenizationStateStore.invalidLineStartIndex - 1; // -1 because the outer loop increments it
}
}

try {
// Tokenize only the first X characters
let freshState = lineStartState!.clone();
r = this.tokenizationSupport.tokenize2(text, freshState, 0);
} catch (e) {
onUnexpectedError(e);
private _tokenizeViewport(builder: MultilineTokensBuilder, startLineNumber: number, endLineNumber: number): void {
if (!this._tokenizationSupport) {
// nothing to do
return;
}

if (endLineNumber <= this._tokenizationStateStore.invalidLineStartIndex) {
// nothing to do
return;
}

if (startLineNumber <= this._tokenizationStateStore.invalidLineStartIndex) {
// tokenization has reached the viewport start...
this._updateTokensUntilLine(builder, endLineNumber);
return;
}

let nonWhitespaceColumn = this._textModel.getLineFirstNonWhitespaceColumn(startLineNumber);
let fakeLines: string[] = [];
let initialState: IState | null = null;
for (let i = startLineNumber - 1; nonWhitespaceColumn > 0 && i >= 1; i--) {
let newNonWhitespaceIndex = this._textModel.getLineFirstNonWhitespaceColumn(i);

if (newNonWhitespaceIndex === 0) {
continue;
}

if (!r) {
r = nullTokenize2(this.languageIdentifier.id, text, lineStartState, 0);
}
this._setTokens(this.languageIdentifier.id, lineIndex, text.length, r.tokens);
eventBuilder.registerChangedTokens(lineIndex + 1);
this._setIsInvalid(lineIndex, false);

if (endStateIndex < linesLength) {
const previousEndState = this._getState(endStateIndex);
if (previousEndState !== null && r.endState.equals(previousEndState)) {
// The end state of this line remains the same
let nextInvalidLineIndex = lineIndex + 1;
while (nextInvalidLineIndex < linesLength) {
if (this._isInvalid(nextInvalidLineIndex)) {
break;
}
if (nextInvalidLineIndex + 1 < linesLength) {
if (this._getState(nextInvalidLineIndex + 1) === null) {
break;
}
} else {
if (this._lastState === null) {
break;
}
}
nextInvalidLineIndex++;
}
this._invalidLineStartIndex = Math.max(this._invalidLineStartIndex, nextInvalidLineIndex);
lineIndex = nextInvalidLineIndex - 1; // -1 because the outer loop increments it
} else {
this._setState(endStateIndex, r.endState);
if (newNonWhitespaceIndex < nonWhitespaceColumn) {
initialState = this._tokenizationStateStore.getBeginState(i - 1);
if (initialState) {
break;
}
} else {
this._lastState = r.endState;
fakeLines.push(this._textModel.getLineContent(i));
nonWhitespaceColumn = newNonWhitespaceIndex;
}
}
this._invalidLineStartIndex = Math.max(this._invalidLineStartIndex, endLineIndex + 1);
}

// #endregion
if (!initialState) {
initialState = this._tokenizationSupport.getInitialState();
}

const languageIdentifier = this._textModel.getLanguageIdentifier();
let state = initialState;
for (let i = fakeLines.length - 1; i >= 0; i--) {
let r = safeTokenize(languageIdentifier, this._tokenizationSupport, fakeLines[i], state);
state = r.endState;
}

for (let lineNumber = startLineNumber; lineNumber <= endLineNumber; lineNumber++) {
let text = this._textModel.getLineContent(lineNumber);
let r = safeTokenize(languageIdentifier, this._tokenizationSupport, text, state);
builder.add(lineNumber, r.tokens);
this._tokenizationStateStore.setFakeTokens(lineNumber - 1);
state = r.endState;
}
}
}

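The viewport path added above cannot rely on an up-to-date tokenizer state, so _tokenizeViewport guesses one: it walks upwards from the viewport start, keeps only the progressively less indented lines (the likely openers of the enclosing scopes), and tokenizes those "fake lines" first to approximate the state at the viewport. A minimal standalone sketch of that indentation walk, for illustration only (not part of this commit; collectContextLines and firstNonWhitespaceColumn are invented names, and the real method additionally stops as soon as the state store already knows a begin state for a line):

function firstNonWhitespaceColumn(line: string): number {
	for (let i = 0; i < line.length; i++) {
		const chCode = line.charCodeAt(i);
		if (chCode !== 32 /* space */ && chCode !== 9 /* tab */) {
			return i + 1; // columns are 1-based
		}
	}
	return 0; // empty or whitespace-only line
}

function collectContextLines(lines: string[], startLineNumber: number): string[] {
	// Keep each line above the viewport that is less indented than everything kept so far.
	let nonWhitespaceColumn = firstNonWhitespaceColumn(lines[startLineNumber - 1]);
	const fakeLines: string[] = [];
	for (let i = startLineNumber - 1; nonWhitespaceColumn > 0 && i >= 1; i--) {
		const column = firstNonWhitespaceColumn(lines[i - 1]);
		if (column === 0) {
			continue; // blank lines carry no indentation information
		}
		if (column < nonWhitespaceColumn) {
			fakeLines.push(lines[i - 1]);
			nonWhitespaceColumn = column;
		}
	}
	// Tokenizing these from last to first (outermost scope first) approximates the
	// tokenizer state at startLineNumber, as _tokenizeViewport does above.
	return fakeLines.reverse();
}
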
export class ModelTokensChangedEventBuilder {

private readonly _ranges: { fromLineNumber: number; toLineNumber: number; }[];

constructor() {
this._ranges = [];
function initializeTokenization(textModel: TextModel): [ITokenizationSupport | null, IState | null] {
const languageIdentifier = textModel.getLanguageIdentifier();
let tokenizationSupport = (
textModel.isTooLargeForTokenization()
? null
: TokenizationRegistry.get(languageIdentifier.language)
);
let initialState: IState | null = null;
if (tokenizationSupport) {
try {
initialState = tokenizationSupport.getInitialState();
} catch (e) {
onUnexpectedError(e);
tokenizationSupport = null;
}
}
return [tokenizationSupport, initialState];
}

public registerChangedTokens(lineNumber: number): void {
const ranges = this._ranges;
const rangesLength = ranges.length;
const previousRange = rangesLength > 0 ? ranges[rangesLength - 1] : null;
function safeTokenize(languageIdentifier: LanguageIdentifier, tokenizationSupport: ITokenizationSupport | null, text: string, state: IState): TokenizationResult2 {
let r: TokenizationResult2 | null = null;

if (previousRange && previousRange.toLineNumber === lineNumber - 1) {
// extend previous range
previousRange.toLineNumber++;
} else {
// insert new range
ranges[rangesLength] = {
fromLineNumber: lineNumber,
toLineNumber: lineNumber
};
if (tokenizationSupport) {
try {
r = tokenizationSupport.tokenize2(text, state.clone(), 0);
} catch (e) {
onUnexpectedError(e);
}
}

public build(): IModelTokensChangedEvent | null {
if (this._ranges.length === 0) {
return null;
}
return {
tokenizationSupportChanged: false,
ranges: this._ranges
};
if (!r) {
r = nullTokenize2(languageIdentifier.id, text, state, 0);
}

LineTokens.convertToEndOffset(r.tokens, text.length);
return r;
}

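For reference, the removed ModelTokensChangedEventBuilder merges consecutive changed line numbers into ranges before a single event is fired. An illustrative use of that merging behaviour, assuming the class exactly as it appears in the removed code above (not part of the commit):

const eventBuilder = new ModelTokensChangedEventBuilder();
for (const changedLine of [5, 6, 7, 12]) {
	eventBuilder.registerChangedTokens(changedLine);
}
const event = eventBuilder.build();
// event => {
//   tokenizationSupportChanged: false,
//   ranges: [ { fromLineNumber: 5, toLineNumber: 7 }, { fromLineNumber: 12, toLineNumber: 12 } ]
// }
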
330	src/vs/editor/common/model/tokensStore.ts	Normal file
@@ -0,0 +1,330 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as arrays from 'vs/base/common/arrays';
import { LineTokens } from 'vs/editor/common/core/lineTokens';
import { Position } from 'vs/editor/common/core/position';
import { IRange } from 'vs/editor/common/core/range';
import { ColorId, FontStyle, LanguageId, MetadataConsts, StandardTokenType, TokenMetadata } from 'vs/editor/common/modes';

function getDefaultMetadata(topLevelLanguageId: LanguageId): number {
	return (
		(topLevelLanguageId << MetadataConsts.LANGUAGEID_OFFSET)
		| (StandardTokenType.Other << MetadataConsts.TOKEN_TYPE_OFFSET)
		| (FontStyle.None << MetadataConsts.FONT_STYLE_OFFSET)
		| (ColorId.DefaultForeground << MetadataConsts.FOREGROUND_OFFSET)
		| (ColorId.DefaultBackground << MetadataConsts.BACKGROUND_OFFSET)
	) >>> 0;
}

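getDefaultMetadata packs the language id, standard token type, font style and default colors into a single 32-bit metadata word. A quick illustrative round-trip through the TokenMetadata accessors imported above (not part of the commit; LanguageId.PlainText is just an example value):

const metadata = getDefaultMetadata(LanguageId.PlainText);
TokenMetadata.getLanguageId(metadata);  // LanguageId.PlainText
TokenMetadata.getTokenType(metadata);   // StandardTokenType.Other
TokenMetadata.getFontStyle(metadata);   // FontStyle.None
TokenMetadata.getForeground(metadata);  // ColorId.DefaultForeground
TokenMetadata.getBackground(metadata);  // ColorId.DefaultBackground
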
const EMPTY_LINE_TOKENS = (new Uint32Array(0)).buffer;

export class MultilineTokensBuilder {

	public readonly tokens: MultilineTokens[];

	constructor() {
		this.tokens = [];
	}

	public add(lineNumber: number, lineTokens: Uint32Array): void {
		if (this.tokens.length > 0) {
			const last = this.tokens[this.tokens.length - 1];
			const lastLineNumber = last.startLineNumber + last.tokens.length - 1;
			if (lastLineNumber + 1 === lineNumber) {
				// append
				last.tokens.push(lineTokens);
				return;
			}
		}
		this.tokens.push(new MultilineTokens(lineNumber, lineTokens));
	}
}

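MultilineTokensBuilder groups line tokens into contiguous blocks: a line that directly follows the previous one is appended to the current MultilineTokens, while a gap starts a new block. An illustrative use (not part of the commit; the Uint32Array contents are placeholder token data):

const builder = new MultilineTokensBuilder();
builder.add(10, new Uint32Array([4, 0]));   // first block starts at line 10
builder.add(11, new Uint32Array([7, 0]));   // consecutive line, appended to the same block
builder.add(20, new Uint32Array([3, 0]));   // gap, a second block starts at line 20
// builder.tokens => [ MultilineTokens(startLineNumber: 10, 2 lines),
//                     MultilineTokens(startLineNumber: 20, 1 line) ]
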
export class MultilineTokens {

	public readonly startLineNumber: number;
	public readonly tokens: Uint32Array[];

	constructor(lineNumber: number, tokens: Uint32Array) {
		this.startLineNumber = lineNumber;
		this.tokens = [tokens];
	}
}

export class TokensStore {
	private _lineTokens: (ArrayBuffer | null)[];
	private _len: number;

	constructor() {
		this._lineTokens = [];
		this._len = 0;
	}

	public flush(): void {
		this._lineTokens = [];
		this._len = 0;
	}

	public getTokens(topLevelLanguageId: LanguageId, lineIndex: number, lineText: string): LineTokens {
		let rawLineTokens: ArrayBuffer | null = null;
		if (lineIndex < this._len) {
			rawLineTokens = this._lineTokens[lineIndex];
		}

		if (rawLineTokens !== null && rawLineTokens !== EMPTY_LINE_TOKENS) {
			return new LineTokens(new Uint32Array(rawLineTokens), lineText);
		}

		let lineTokens = new Uint32Array(2);
		lineTokens[0] = lineText.length;
		lineTokens[1] = getDefaultMetadata(topLevelLanguageId);
		return new LineTokens(lineTokens, lineText);
	}

	private static _massageTokens(topLevelLanguageId: LanguageId, lineTextLength: number, tokens: Uint32Array): ArrayBuffer {
		if (lineTextLength === 0) {
			let hasDifferentLanguageId = false;
			if (tokens && tokens.length > 1) {
				hasDifferentLanguageId = (TokenMetadata.getLanguageId(tokens[1]) !== topLevelLanguageId);
			}

			if (!hasDifferentLanguageId) {
				return EMPTY_LINE_TOKENS;
			}
		}

		if (!tokens || tokens.length === 0) {
			tokens = new Uint32Array(2);
			tokens[0] = lineTextLength;
			tokens[1] = getDefaultMetadata(topLevelLanguageId);
		}

		return tokens.buffer;
	}

	private _ensureLine(lineIndex: number): void {
		while (lineIndex >= this._len) {
			this._lineTokens[this._len] = null;
			this._len++;
		}
	}

	private _deleteLines(start: number, deleteCount: number): void {
		if (deleteCount === 0) {
			return;
		}
		this._lineTokens.splice(start, deleteCount);
		this._len -= deleteCount;
	}

	private _insertLines(insertIndex: number, insertCount: number): void {
		if (insertCount === 0) {
			return;
		}
		let lineTokens: (ArrayBuffer | null)[] = [];
		for (let i = 0; i < insertCount; i++) {
			lineTokens[i] = null;
		}
		this._lineTokens = arrays.arrayInsert(this._lineTokens, insertIndex, lineTokens);
		this._len += insertCount;
	}

	public setTokens(topLevelLanguageId: LanguageId, lineIndex: number, lineTextLength: number, _tokens: Uint32Array): void {
		const tokens = TokensStore._massageTokens(topLevelLanguageId, lineTextLength, _tokens);
		this._ensureLine(lineIndex);
		this._lineTokens[lineIndex] = tokens;
	}

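As an aside on the storage format used above: each line is kept as one ArrayBuffer viewed as a Uint32Array of [endOffset, metadata] pairs, one pair per token, and getTokens falls back to a single default token when a line has no stored data. A small illustrative sketch (not part of the commit; the offsets and the reuse of getDefaultMetadata as the metadata word are placeholders):

const meta = getDefaultMetadata(LanguageId.PlainText);
const store = new TokensStore();
// Three tokens on the 10-character line 'let x = 1;', ending at offsets 3, 6 and 10:
store.setTokens(LanguageId.PlainText, 0 /* lineIndex */, 10 /* lineTextLength */,
	new Uint32Array([3, meta, 6, meta, 10, meta]));
const stored = store.getTokens(LanguageId.PlainText, 0, 'let x = 1;');    // 3 tokens
const fallback = store.getTokens(LanguageId.PlainText, 5, 'untokenized'); // 1 default token
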
	//#region Editing

	public applyEdits(range: IRange, eolCount: number, firstLineLength: number): void {
		this._acceptDeleteRange(range);
		this._acceptInsertText(new Position(range.startLineNumber, range.startColumn), eolCount, firstLineLength);
	}

	private _acceptDeleteRange(range: IRange): void {

		const firstLineIndex = range.startLineNumber - 1;
		if (firstLineIndex >= this._len) {
			return;
		}

		if (range.startLineNumber === range.endLineNumber) {
			if (range.startColumn === range.endColumn) {
				// Nothing to delete
				return;
			}

			this._lineTokens[firstLineIndex] = TokensStore._delete(this._lineTokens[firstLineIndex], range.startColumn - 1, range.endColumn - 1);
			return;
		}

		this._lineTokens[firstLineIndex] = TokensStore._deleteEnding(this._lineTokens[firstLineIndex], range.startColumn - 1);

		const lastLineIndex = range.endLineNumber - 1;
		let lastLineTokens: ArrayBuffer | null = null;
		if (lastLineIndex < this._len) {
			lastLineTokens = TokensStore._deleteBeginning(this._lineTokens[lastLineIndex], range.endColumn - 1);
		}

		// Take remaining text on last line and append it to remaining text on first line
		this._lineTokens[firstLineIndex] = TokensStore._append(this._lineTokens[firstLineIndex], lastLineTokens);

		// Delete middle lines
		this._deleteLines(range.startLineNumber, range.endLineNumber - range.startLineNumber);
	}

	private _acceptInsertText(position: Position, eolCount: number, firstLineLength: number): void {

		if (eolCount === 0 && firstLineLength === 0) {
			// Nothing to insert
			return;
		}

		const lineIndex = position.lineNumber - 1;
		if (lineIndex >= this._len) {
			return;
		}

		if (eolCount === 0) {
			// Inserting text on one line
			this._lineTokens[lineIndex] = TokensStore._insert(this._lineTokens[lineIndex], position.column - 1, firstLineLength);
			return;
		}

		this._lineTokens[lineIndex] = TokensStore._deleteEnding(this._lineTokens[lineIndex], position.column - 1);
		this._lineTokens[lineIndex] = TokensStore._insert(this._lineTokens[lineIndex], position.column - 1, firstLineLength);

		this._insertLines(position.lineNumber, eolCount);
	}

	private static _deleteBeginning(lineTokens: ArrayBuffer | null, toChIndex: number): ArrayBuffer | null {
		if (lineTokens === null || lineTokens === EMPTY_LINE_TOKENS) {
			return lineTokens;
		}
		return TokensStore._delete(lineTokens, 0, toChIndex);
	}

	private static _deleteEnding(lineTokens: ArrayBuffer | null, fromChIndex: number): ArrayBuffer | null {
		if (lineTokens === null || lineTokens === EMPTY_LINE_TOKENS) {
			return lineTokens;
		}

		const tokens = new Uint32Array(lineTokens);
		const lineTextLength = tokens[tokens.length - 2];
		return TokensStore._delete(lineTokens, fromChIndex, lineTextLength);
	}

	private static _delete(lineTokens: ArrayBuffer | null, fromChIndex: number, toChIndex: number): ArrayBuffer | null {
		if (lineTokens === null || lineTokens === EMPTY_LINE_TOKENS || fromChIndex === toChIndex) {
			return lineTokens;
		}

		const tokens = new Uint32Array(lineTokens);
		const tokensCount = (tokens.length >>> 1);

		// special case: deleting everything
		if (fromChIndex === 0 && tokens[tokens.length - 2] === toChIndex) {
			return EMPTY_LINE_TOKENS;
		}

		const fromTokenIndex = LineTokens.findIndexInTokensArray(tokens, fromChIndex);
		const fromTokenStartOffset = (fromTokenIndex > 0 ? tokens[(fromTokenIndex - 1) << 1] : 0);
		const fromTokenEndOffset = tokens[fromTokenIndex << 1];

		if (toChIndex < fromTokenEndOffset) {
			// the delete range is inside a single token
			const delta = (toChIndex - fromChIndex);
			for (let i = fromTokenIndex; i < tokensCount; i++) {
				tokens[i << 1] -= delta;
			}
			return lineTokens;
		}

		let dest: number;
		let lastEnd: number;
		if (fromTokenStartOffset !== fromChIndex) {
			tokens[fromTokenIndex << 1] = fromChIndex;
			dest = ((fromTokenIndex + 1) << 1);
			lastEnd = fromChIndex;
		} else {
			dest = (fromTokenIndex << 1);
			lastEnd = fromTokenStartOffset;
		}

		const delta = (toChIndex - fromChIndex);
		for (let tokenIndex = fromTokenIndex + 1; tokenIndex < tokensCount; tokenIndex++) {
			const tokenEndOffset = tokens[tokenIndex << 1] - delta;
			if (tokenEndOffset > lastEnd) {
				tokens[dest++] = tokenEndOffset;
				tokens[dest++] = tokens[(tokenIndex << 1) + 1];
				lastEnd = tokenEndOffset;
			}
		}

		if (dest === tokens.length) {
			// nothing to trim
			return lineTokens;
		}

		let tmp = new Uint32Array(dest);
		tmp.set(tokens.subarray(0, dest), 0);
		return tmp.buffer;
	}

	private static _append(lineTokens: ArrayBuffer | null, _otherTokens: ArrayBuffer | null): ArrayBuffer | null {
		if (_otherTokens === EMPTY_LINE_TOKENS) {
			return lineTokens;
		}
		if (lineTokens === EMPTY_LINE_TOKENS) {
			return _otherTokens;
		}
		if (lineTokens === null) {
			return lineTokens;
		}
		if (_otherTokens === null) {
			// cannot determine combined line length...
			return null;
		}
		const myTokens = new Uint32Array(lineTokens);
		const otherTokens = new Uint32Array(_otherTokens);
		const otherTokensCount = (otherTokens.length >>> 1);

		let result = new Uint32Array(myTokens.length + otherTokens.length);
		result.set(myTokens, 0);
		let dest = myTokens.length;
		const delta = myTokens[myTokens.length - 2];
		for (let i = 0; i < otherTokensCount; i++) {
			result[dest++] = otherTokens[(i << 1)] + delta;
			result[dest++] = otherTokens[(i << 1) + 1];
		}
		return result.buffer;
	}

	private static _insert(lineTokens: ArrayBuffer | null, chIndex: number, textLength: number): ArrayBuffer | null {
		if (lineTokens === null || lineTokens === EMPTY_LINE_TOKENS) {
			// nothing to do
			return lineTokens;
		}

		const tokens = new Uint32Array(lineTokens);
		const tokensCount = (tokens.length >>> 1);

		let fromTokenIndex = LineTokens.findIndexInTokensArray(tokens, chIndex);
		if (fromTokenIndex > 0) {
			const fromTokenStartOffset = tokens[(fromTokenIndex - 1) << 1];
			if (fromTokenStartOffset === chIndex) {
				fromTokenIndex--;
			}
		}
		for (let tokenIndex = fromTokenIndex; tokenIndex < tokensCount; tokenIndex++) {
			tokens[tokenIndex << 1] += textLength;
		}
		return lineTokens;
	}

	//#endregion
}
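Putting the editing region together: applyEdits shifts the stored [endOffset, metadata] pairs so that existing tokens stay roughly aligned with the edited text until the background tokenizer revisits the affected lines. An illustrative single-line deletion (not part of the commit; the metadata values are placeholders):

const meta = getDefaultMetadata(LanguageId.PlainText);
const store = new TokensStore();
store.setTokens(LanguageId.PlainText, 0, 10, new Uint32Array([3, meta, 10, meta]));
// Delete two characters on line 1, columns 2..4 (no new lines, nothing inserted):
store.applyEdits({ startLineNumber: 1, startColumn: 2, endLineNumber: 1, endColumn: 4 }, 0, 0);
// The surviving token end offsets move left by 2 (3 -> 1, 10 -> 8); an insertion
// would move them right by the inserted length instead.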