Merge from vscode 2f984aad710215f4e4684a035bb02f55d1a9e2cc (#9819)

Authored by Anthony Dresser on 2020-04-01 00:44:39 -07:00, committed by GitHub
Parent: 0e27aaa61f
Commit: 0bfbdc62ed
247 changed files with 5402 additions and 3311 deletions

View File

@@ -2722,10 +2722,6 @@ export interface ISuggestOptions {
* Overwrite word ends on accept. Defaults to 'insert'.
*/
insertMode?: 'insert' | 'replace';
/**
* Show a highlight when a suggestion replaces or keeps text after the cursor. Defaults to true.
*/
insertHighlight?: boolean;
/**
* Enable graceful matching. Defaults to true.
*/
@@ -2876,7 +2872,6 @@ class EditorSuggest extends BaseEditorOption<EditorOption.suggest, InternalSugge
constructor() {
const defaults: InternalSuggestOptions = {
insertMode: 'insert',
insertHighlight: true,
filterGraceful: true,
snippetsPreventQuickSuggestions: true,
localityBonus: false,
@@ -2927,11 +2922,6 @@ class EditorSuggest extends BaseEditorOption<EditorOption.suggest, InternalSugge
default: defaults.insertMode,
description: nls.localize('suggest.insertMode', "Controls whether words are overwritten when accepting completions. Note that this depends on extensions opting into this feature.")
},
'editor.suggest.insertHighlight': {
type: 'boolean',
default: defaults.insertHighlight,
description: nls.localize('suggest.insertHighlight', "Controls whether unexpected text modifications while accepting completions should be highlighted, e.g. `insertMode` is `replace` but the completion only supports `insert`.")
},
'editor.suggest.filterGraceful': {
type: 'boolean',
default: defaults.filterGraceful,
@@ -3124,7 +3114,6 @@ class EditorSuggest extends BaseEditorOption<EditorOption.suggest, InternalSugge
const input = _input as ISuggestOptions;
return {
insertMode: EditorStringEnumOption.stringSet(input.insertMode, this.defaultValue.insertMode, ['insert', 'replace']),
insertHighlight: EditorBooleanOption.boolean(input.insertHighlight, this.defaultValue.insertHighlight),
filterGraceful: EditorBooleanOption.boolean(input.filterGraceful, this.defaultValue.filterGraceful),
snippetsPreventQuickSuggestions: EditorBooleanOption.boolean(input.snippetsPreventQuickSuggestions, this.defaultValue.snippetsPreventQuickSuggestions),
localityBonus: EditorBooleanOption.boolean(input.localityBonus, this.defaultValue.localityBonus),
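The validation above coerces each user-supplied value with a default fallback. A minimal standalone sketch of that boolean coercion pattern follows; it assumes a free function rather than the editor's actual EditorBooleanOption helper.

// Sketch only: coerce an unknown settings value to a boolean, falling back to the default.
function booleanOption(value: unknown, defaultValue: boolean): boolean {
	if (typeof value === 'undefined') {
		return defaultValue;
	}
	if (value === 'false') {
		// settings occasionally arrive as the string 'false'
		return false;
	}
	return Boolean(value);
}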

View File

@@ -238,7 +238,7 @@ export class WordOperations {
const left = lineContent.charCodeAt(column - 2);
const right = lineContent.charCodeAt(column - 1);
if (left !== CharCode.Underline && right === CharCode.Underline) {
if (left === CharCode.Underline && right !== CharCode.Underline) {
// snake_case_variables
return new Position(lineNumber, column);
}
@@ -340,7 +340,7 @@ export class WordOperations {
const left = lineContent.charCodeAt(column - 2);
const right = lineContent.charCodeAt(column - 1);
if (left === CharCode.Underline && right !== CharCode.Underline) {
if (left !== CharCode.Underline && right === CharCode.Underline) {
// snake_case_variables
return new Position(lineNumber, column);
}
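The two hunks above swap which underscore test each word-part direction uses. As a neutral, standalone illustration (not the editor's WordOperations code), a snake_case boundary can be described as a column whose left neighbour is an underscore and whose right neighbour is not:

// Sketch: collect the 1-based columns that sit just after a run of underscores.
function snakeCaseBoundaries(lineContent: string): number[] {
	const UNDERSCORE = 0x5F; // '_'
	const stops: number[] = [];
	for (let column = 2; column <= lineContent.length; column++) {
		const left = lineContent.charCodeAt(column - 2);
		const right = lineContent.charCodeAt(column - 1);
		if (left === UNDERSCORE && right !== UNDERSCORE) {
			stops.push(column);
		}
	}
	return stops;
}
// e.g. snakeCaseBoundaries('snake_case_variables') -> [7, 12]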

View File

@@ -24,17 +24,18 @@ export interface IStringBuilder {
}
let _platformTextDecoder: TextDecoder | null;
function getPlatformTextDecoder(): TextDecoder {
export function getPlatformTextDecoder(): TextDecoder {
if (!_platformTextDecoder) {
_platformTextDecoder = new TextDecoder(platform.isLittleEndian() ? 'UTF-16LE' : 'UTF-16BE');
}
return _platformTextDecoder;
}
export const hasTextDecoder = (typeof TextDecoder !== 'undefined');
export let createStringBuilder: (capacity: number) => IStringBuilder;
export let decodeUTF16LE: (source: Uint8Array, offset: number, len: number) => string;
if (typeof TextDecoder !== 'undefined') {
if (hasTextDecoder) {
createStringBuilder = (capacity) => new StringBuilder(capacity);
decodeUTF16LE = standardDecodeUTF16LE;
} else {

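For context, here is a minimal sketch of the TextDecoder-or-fallback pattern that hasTextDecoder gates. It assumes len counts UTF-16 code units and is illustrative only, not the module's actual standardDecodeUTF16LE implementation.

// Sketch: decode `len` UTF-16LE code units starting at byte `offset`.
function decodeUTF16LESketch(source: Uint8Array, offset: number, len: number): string {
	if (typeof TextDecoder !== 'undefined') {
		// TextDecoder accepts any ArrayBufferView, so no alignment concerns here.
		return new TextDecoder('utf-16le').decode(source.subarray(offset, offset + 2 * len));
	}
	let result = '';
	for (let i = 0; i < len; i++) {
		const charCode = source[offset + 2 * i] | (source[offset + 2 * i + 1] << 8);
		result += String.fromCharCode(charCode);
	}
	return result;
}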
View File

@@ -827,7 +827,17 @@ export interface ITextModel {
/**
* @internal
*/
setSemanticTokens(tokens: MultilineTokens2[] | null): void;
setSemanticTokens(tokens: MultilineTokens2[] | null, isComplete: boolean): void;
/**
* @internal
*/
setPartialSemanticTokens(range: Range, tokens: MultilineTokens2[] | null): void;
/**
* @internal
*/
hasSemanticTokens(): boolean;
/**
* Flush all tokenization state.

View File

@@ -10,10 +10,13 @@ import { EndOfLineSequence, ICursorStateComputer, IIdentifiedSingleEditOperation
import { TextModel } from 'vs/editor/common/model/textModel';
import { IUndoRedoService, IResourceUndoRedoElement, UndoRedoElementType, IWorkspaceUndoRedoElement } from 'vs/platform/undoRedo/common/undoRedo';
import { URI } from 'vs/base/common/uri';
import { getComparisonKey as uriGetComparisonKey } from 'vs/base/common/resources';
import { TextChange, compressConsecutiveTextChanges } from 'vs/editor/common/model/textChange';
import * as buffer from 'vs/base/common/buffer';
function uriGetComparisonKey(resource: URI): string {
return resource.toString();
}
class SingleModelEditStackData {
public static create(model: ITextModel, beforeCursorState: Selection[] | null): SingleModelEditStackData {

View File

@@ -1793,8 +1793,8 @@ export class TextModel extends Disposable implements model.ITextModel {
}
}
public setSemanticTokens(tokens: MultilineTokens2[] | null): void {
this._tokens2.set(tokens);
public setSemanticTokens(tokens: MultilineTokens2[] | null, isComplete: boolean): void {
this._tokens2.set(tokens, isComplete);
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
@@ -1803,6 +1803,23 @@ export class TextModel extends Disposable implements model.ITextModel {
});
}
public hasSemanticTokens(): boolean {
return this._tokens2.isComplete();
}
public setPartialSemanticTokens(range: Range, tokens: MultilineTokens2[]): void {
if (this.hasSemanticTokens()) {
return;
}
const changedRange = this._tokens2.setPartial(range, tokens);
this._emitModelTokensChangedEvent({
tokenizationSupportChanged: false,
semanticTokensApplied: true,
ranges: [{ fromLineNumber: changedRange.startLineNumber, toLineNumber: changedRange.endLineNumber }]
});
}
public tokenizeViewport(startLineNumber: number, endLineNumber: number): void {
startLineNumber = Math.max(1, startLineNumber);
endLineNumber = Math.min(this._buffer.getLineCount(), endLineNumber);

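Taken together with the ITextModel additions earlier in this commit, the intended call pattern looks roughly like the sketch below; applyViewportTokens and applyDocumentTokens are invented names for illustration and are not part of the change.

import { Range } from 'vs/editor/common/core/range';
import { ITextModel } from 'vs/editor/common/model';
import { MultilineTokens2 } from 'vs/editor/common/model/tokensStore';

// Sketch: a range (viewport) provider only fills in gaps; a document provider
// marks the model as complete so later partial results are ignored.
function applyViewportTokens(model: ITextModel, range: Range, tokens: MultilineTokens2[]): void {
	if (model.hasSemanticTokens()) {
		// a complete document result is already installed
		return;
	}
	model.setPartialSemanticTokens(range, tokens);
}

function applyDocumentTokens(model: ITextModel, tokens: MultilineTokens2[] | null): void {
	model.setSemanticTokens(tokens, /* isComplete */ true);
}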
View File

@@ -6,7 +6,7 @@
import * as arrays from 'vs/base/common/arrays';
import { LineTokens } from 'vs/editor/common/core/lineTokens';
import { Position } from 'vs/editor/common/core/position';
import { IRange } from 'vs/editor/common/core/range';
import { IRange, Range } from 'vs/editor/common/core/range';
import { ColorId, FontStyle, LanguageId, MetadataConsts, StandardTokenType, TokenMetadata } from 'vs/editor/common/modes';
import { writeUInt32BE, readUInt32BE } from 'vs/base/common/buffer';
import { CharCode } from 'vs/base/common/charCode';
@@ -124,20 +124,7 @@ export class MultilineTokensBuilder {
}
}
export interface IEncodedTokens {
getTokenCount(): number;
getDeltaLine(tokenIndex: number): number;
getMaxDeltaLine(): number;
getStartCharacter(tokenIndex: number): number;
getEndCharacter(tokenIndex: number): number;
getMetadata(tokenIndex: number): number;
clear(): void;
acceptDeleteRange(horizontalShiftForFirstLineTokens: number, startDeltaLine: number, startCharacter: number, endDeltaLine: number, endCharacter: number): void;
acceptInsertText(deltaLine: number, character: number, eolCount: number, firstLineLength: number, lastLineLength: number, firstCharCode: number): void;
}
export class SparseEncodedTokens implements IEncodedTokens {
export class SparseEncodedTokens {
/**
* The encoding of tokens is:
* 4*i deltaLine (from `startLineNumber`)
@@ -145,7 +132,7 @@ export class SparseEncodedTokens implements IEncodedTokens {
* 4*i+2 endCharacter (from the line start)
* 4*i+3 metadata
*/
private _tokens: Uint32Array;
private readonly _tokens: Uint32Array;
private _tokenCount: number;
constructor(tokens: Uint32Array) {
@@ -153,38 +140,167 @@ export class SparseEncodedTokens implements IEncodedTokens {
this._tokenCount = tokens.length / 4;
}
public toString(startLineNumber: number): string {
let pieces: string[] = [];
for (let i = 0; i < this._tokenCount; i++) {
pieces.push(`(${this._getDeltaLine(i) + startLineNumber},${this._getStartCharacter(i)}-${this._getEndCharacter(i)})`);
}
return `[${pieces.join(',')}]`;
}
public getMaxDeltaLine(): number {
const tokenCount = this.getTokenCount();
const tokenCount = this._getTokenCount();
if (tokenCount === 0) {
return -1;
}
return this.getDeltaLine(tokenCount - 1);
return this._getDeltaLine(tokenCount - 1);
}
public getTokenCount(): number {
public getRange(): Range | null {
const tokenCount = this._getTokenCount();
if (tokenCount === 0) {
return null;
}
const startChar = this._getStartCharacter(0);
const maxDeltaLine = this._getDeltaLine(tokenCount - 1);
const endChar = this._getEndCharacter(tokenCount - 1);
return new Range(0, startChar + 1, maxDeltaLine, endChar + 1);
}
private _getTokenCount(): number {
return this._tokenCount;
}
public getDeltaLine(tokenIndex: number): number {
private _getDeltaLine(tokenIndex: number): number {
return this._tokens[4 * tokenIndex];
}
public getStartCharacter(tokenIndex: number): number {
private _getStartCharacter(tokenIndex: number): number {
return this._tokens[4 * tokenIndex + 1];
}
public getEndCharacter(tokenIndex: number): number {
private _getEndCharacter(tokenIndex: number): number {
return this._tokens[4 * tokenIndex + 2];
}
public getMetadata(tokenIndex: number): number {
return this._tokens[4 * tokenIndex + 3];
public isEmpty(): boolean {
return (this._getTokenCount() === 0);
}
public getLineTokens(deltaLine: number): LineTokens2 | null {
let low = 0;
let high = this._getTokenCount() - 1;
while (low < high) {
const mid = low + Math.floor((high - low) / 2);
const midDeltaLine = this._getDeltaLine(mid);
if (midDeltaLine < deltaLine) {
low = mid + 1;
} else if (midDeltaLine > deltaLine) {
high = mid - 1;
} else {
let min = mid;
while (min > low && this._getDeltaLine(min - 1) === deltaLine) {
min--;
}
let max = mid;
while (max < high && this._getDeltaLine(max + 1) === deltaLine) {
max++;
}
return new LineTokens2(this._tokens.subarray(4 * min, 4 * max + 4));
}
}
if (this._getDeltaLine(low) === deltaLine) {
return new LineTokens2(this._tokens.subarray(4 * low, 4 * low + 4));
}
return null;
}
public clear(): void {
this._tokenCount = 0;
}
public removeTokens(startDeltaLine: number, startChar: number, endDeltaLine: number, endChar: number): number {
const tokens = this._tokens;
const tokenCount = this._tokenCount;
let newTokenCount = 0;
let hasDeletedTokens = false;
let firstDeltaLine = 0;
for (let i = 0; i < tokenCount; i++) {
const srcOffset = 4 * i;
const tokenDeltaLine = tokens[srcOffset];
const tokenStartCharacter = tokens[srcOffset + 1];
const tokenEndCharacter = tokens[srcOffset + 2];
const tokenMetadata = tokens[srcOffset + 3];
if (
(tokenDeltaLine > startDeltaLine || (tokenDeltaLine === startDeltaLine && tokenEndCharacter >= startChar))
&& (tokenDeltaLine < endDeltaLine || (tokenDeltaLine === endDeltaLine && tokenStartCharacter <= endChar))
) {
hasDeletedTokens = true;
} else {
if (newTokenCount === 0) {
firstDeltaLine = tokenDeltaLine;
}
if (hasDeletedTokens) {
// must move the token to the left
const destOffset = 4 * newTokenCount;
tokens[destOffset] = tokenDeltaLine - firstDeltaLine;
tokens[destOffset + 1] = tokenStartCharacter;
tokens[destOffset + 2] = tokenEndCharacter;
tokens[destOffset + 3] = tokenMetadata;
}
newTokenCount++;
}
}
this._tokenCount = newTokenCount;
return firstDeltaLine;
}
public split(startDeltaLine: number, startChar: number, endDeltaLine: number, endChar: number): [SparseEncodedTokens, SparseEncodedTokens, number] {
const tokens = this._tokens;
const tokenCount = this._tokenCount;
let aTokens: number[] = [];
let bTokens: number[] = [];
let destTokens: number[] = aTokens;
let destOffset = 0;
let destFirstDeltaLine: number = 0;
for (let i = 0; i < tokenCount; i++) {
const srcOffset = 4 * i;
const tokenDeltaLine = tokens[srcOffset];
const tokenStartCharacter = tokens[srcOffset + 1];
const tokenEndCharacter = tokens[srcOffset + 2];
const tokenMetadata = tokens[srcOffset + 3];
if ((tokenDeltaLine > startDeltaLine || (tokenDeltaLine === startDeltaLine && tokenEndCharacter >= startChar))) {
if ((tokenDeltaLine < endDeltaLine || (tokenDeltaLine === endDeltaLine && tokenStartCharacter <= endChar))) {
// this token is touching the range
continue;
} else {
// this token is after the range
if (destTokens !== bTokens) {
// this token is the first token after the range
destTokens = bTokens;
destOffset = 0;
destFirstDeltaLine = tokenDeltaLine;
}
}
}
destTokens[destOffset++] = tokenDeltaLine - destFirstDeltaLine;
destTokens[destOffset++] = tokenStartCharacter;
destTokens[destOffset++] = tokenEndCharacter;
destTokens[destOffset++] = tokenMetadata;
}
return [new SparseEncodedTokens(new Uint32Array(aTokens)), new SparseEncodedTokens(new Uint32Array(bTokens)), destFirstDeltaLine];
}
public acceptDeleteRange(horizontalShiftForFirstLineTokens: number, startDeltaLine: number, startCharacter: number, endDeltaLine: number, endCharacter: number): void {
// This is a bit complex; here are the cases I used to think about this:
//
@@ -414,30 +530,26 @@ export class SparseEncodedTokens implements IEncodedTokens {
export class LineTokens2 {
private readonly _actual: IEncodedTokens;
private readonly _startTokenIndex: number;
private readonly _endTokenIndex: number;
private readonly _tokens: Uint32Array;
constructor(actual: IEncodedTokens, startTokenIndex: number, endTokenIndex: number) {
this._actual = actual;
this._startTokenIndex = startTokenIndex;
this._endTokenIndex = endTokenIndex;
constructor(tokens: Uint32Array) {
this._tokens = tokens;
}
public getCount(): number {
return this._endTokenIndex - this._startTokenIndex + 1;
return this._tokens.length / 4;
}
public getStartCharacter(tokenIndex: number): number {
return this._actual.getStartCharacter(this._startTokenIndex + tokenIndex);
return this._tokens[4 * tokenIndex + 1];
}
public getEndCharacter(tokenIndex: number): number {
return this._actual.getEndCharacter(this._startTokenIndex + tokenIndex);
return this._tokens[4 * tokenIndex + 2];
}
public getMetadata(tokenIndex: number): number {
return this._actual.getMetadata(this._startTokenIndex + tokenIndex);
return this._tokens[4 * tokenIndex + 3];
}
}
@@ -445,59 +557,58 @@ export class MultilineTokens2 {
public startLineNumber: number;
public endLineNumber: number;
public tokens: IEncodedTokens;
public tokens: SparseEncodedTokens;
constructor(startLineNumber: number, tokens: IEncodedTokens) {
constructor(startLineNumber: number, tokens: SparseEncodedTokens) {
this.startLineNumber = startLineNumber;
this.tokens = tokens;
this.endLineNumber = this.startLineNumber + this.tokens.getMaxDeltaLine();
}
public toString(): string {
return this.tokens.toString(this.startLineNumber);
}
private _updateEndLineNumber(): void {
this.endLineNumber = this.startLineNumber + this.tokens.getMaxDeltaLine();
}
public isEmpty(): boolean {
return this.tokens.isEmpty();
}
public getLineTokens(lineNumber: number): LineTokens2 | null {
if (this.startLineNumber <= lineNumber && lineNumber <= this.endLineNumber) {
const findResult = MultilineTokens2._findTokensWithLine(this.tokens, lineNumber - this.startLineNumber);
if (findResult) {
const [startTokenIndex, endTokenIndex] = findResult;
return new LineTokens2(this.tokens, startTokenIndex, endTokenIndex);
}
return this.tokens.getLineTokens(lineNumber - this.startLineNumber);
}
return null;
}
private static _findTokensWithLine(tokens: IEncodedTokens, deltaLine: number): [number, number] | null {
let low = 0;
let high = tokens.getTokenCount() - 1;
while (low < high) {
const mid = low + Math.floor((high - low) / 2);
const midDeltaLine = tokens.getDeltaLine(mid);
if (midDeltaLine < deltaLine) {
low = mid + 1;
} else if (midDeltaLine > deltaLine) {
high = mid - 1;
} else {
let min = mid;
while (min > low && tokens.getDeltaLine(min - 1) === deltaLine) {
min--;
}
let max = mid;
while (max < high && tokens.getDeltaLine(max + 1) === deltaLine) {
max++;
}
return [min, max];
}
public getRange(): Range | null {
const deltaRange = this.tokens.getRange();
if (!deltaRange) {
return deltaRange;
}
return new Range(this.startLineNumber + deltaRange.startLineNumber, deltaRange.startColumn, this.startLineNumber + deltaRange.endLineNumber, deltaRange.endColumn);
}
if (tokens.getDeltaLine(low) === deltaLine) {
return [low, low];
}
return null;
public removeTokens(range: Range): void {
const startLineIndex = range.startLineNumber - this.startLineNumber;
const endLineIndex = range.endLineNumber - this.startLineNumber;
this.startLineNumber += this.tokens.removeTokens(startLineIndex, range.startColumn - 1, endLineIndex, range.endColumn - 1);
this._updateEndLineNumber();
}
public split(range: Range): [MultilineTokens2, MultilineTokens2] {
// split tokens in two:
// a) all the tokens before `range`
// b) all the tokens after `range`
const startLineIndex = range.startLineNumber - this.startLineNumber;
const endLineIndex = range.endLineNumber - this.startLineNumber;
const [a, b, bDeltaLine] = this.tokens.split(startLineIndex, range.startColumn - 1, endLineIndex, range.endColumn - 1);
return [new MultilineTokens2(this.startLineNumber, a), new MultilineTokens2(this.startLineNumber + bDeltaLine, b)];
}
public applyEdit(range: IRange, text: string): void {
@@ -761,17 +872,91 @@ function toUint32Array(arr: Uint32Array | ArrayBuffer): Uint32Array {
export class TokensStore2 {
private _pieces: MultilineTokens2[];
private _isComplete: boolean;
constructor() {
this._pieces = [];
this._isComplete = false;
}
public flush(): void {
this._pieces = [];
this._isComplete = false;
}
public set(pieces: MultilineTokens2[] | null) {
public set(pieces: MultilineTokens2[] | null, isComplete: boolean): void {
this._pieces = pieces || [];
this._isComplete = isComplete;
}
public setPartial(_range: Range, pieces: MultilineTokens2[]): Range {
if (pieces.length === 0) {
return _range;
}
const _firstRange = pieces[0].getRange();
const _lastRange = pieces[pieces.length - 1].getRange();
if (!_firstRange || !_lastRange) {
return _range;
}
const range = _range.plusRange(_firstRange).plusRange(_lastRange);
let insertPosition: { index: number; } | null = null;
for (let i = 0, len = this._pieces.length; i < len; i++) {
const piece = this._pieces[i];
if (piece.endLineNumber < range.startLineNumber) {
// this piece is before the range
continue;
}
if (piece.startLineNumber > range.endLineNumber) {
// this piece is after the range, so mark the spot before this piece
// as a good insertion position and stop looping
insertPosition = insertPosition || { index: i };
break;
}
// this piece might intersect with the range
piece.removeTokens(range);
if (piece.isEmpty()) {
// remove the piece if it became empty
this._pieces.splice(i, 1);
i--;
len--;
continue;
}
if (piece.endLineNumber < range.startLineNumber) {
// after removal, this piece is before the range
continue;
}
if (piece.startLineNumber > range.endLineNumber) {
// after removal, this piece is after the range
insertPosition = insertPosition || { index: i };
continue;
}
// after removal, this piece contains the range
const [a, b] = piece.split(range);
this._pieces.splice(i, 1, a, b);
i++;
len++;
insertPosition = insertPosition || { index: i };
}
insertPosition = insertPosition || { index: this._pieces.length };
this._pieces = arrays.arrayInsert(this._pieces, insertPosition.index, pieces);
// console.log(`I HAVE ${this._pieces.length} pieces`);
// console.log(`${this._pieces.map(p => p.toString()).join(', ')}`);
return range;
}
public isComplete(): boolean {
return this._isComplete;
}
public addSemanticTokens(lineNumber: number, aTokens: LineTokens): LineTokens {
@@ -782,7 +967,7 @@ export class TokensStore2 {
}
const pieceIndex = TokensStore2._findFirstPieceWithLine(pieces, lineNumber);
const bTokens = this._pieces[pieceIndex].getLineTokens(lineNumber);
const bTokens = pieces[pieceIndex].getLineTokens(lineNumber);
if (!bTokens) {
return aTokens;

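As a reading aid for the 4-uint32-per-token layout documented above (deltaLine, startCharacter, endCharacter, metadata), here is a standalone sketch that extracts one line's tokens with a linear scan; the class itself binary-searches because tokens are sorted by deltaLine.

// Sketch: return the sub-array of 4-tuples whose deltaLine matches.
function tokensOnDeltaLine(tokens: Uint32Array, deltaLine: number): Uint32Array {
	const tokenCount = tokens.length / 4;
	let first = -1;
	let last = -1;
	for (let i = 0; i < tokenCount; i++) {
		if (tokens[4 * i] === deltaLine) {
			if (first === -1) {
				first = i;
			}
			last = i;
		}
	}
	if (first === -1) {
		return new Uint32Array(0);
	}
	// same slice shape that LineTokens2 receives from getLineTokens
	return tokens.subarray(4 * first, 4 * last + 4);
}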
View File

@@ -4,6 +4,7 @@
*--------------------------------------------------------------------------------------------*/
import * as strings from 'vs/base/common/strings';
import * as stringBuilder from 'vs/editor/common/core/stringBuilder';
import { Range } from 'vs/editor/common/core/range';
import { LanguageIdentifier } from 'vs/editor/common/modes';
import { CharacterPair } from 'vs/editor/common/modes/languageConfiguration';
@@ -264,14 +265,24 @@ function createBracketOrRegExp(pieces: string[]): RegExp {
return strings.createRegExp(regexStr, true);
}
let toReversedString = (function () {
const toReversedString = (function () {
function reverse(str: string): string {
let reversedStr = '';
for (let i = str.length - 1; i >= 0; i--) {
reversedStr += str.charAt(i);
if (stringBuilder.hasTextDecoder) {
// create a Uint16Array and then use a TextDecoder to create a string
const arr = new Uint16Array(str.length);
let offset = 0;
for (let i = str.length - 1; i >= 0; i--) {
arr[offset++] = str.charCodeAt(i);
}
return stringBuilder.getPlatformTextDecoder().decode(arr);
} else {
let result: string[] = [], resultLen = 0;
for (let i = str.length - 1; i >= 0; i--) {
result[resultLen++] = str.charAt(i);
}
return result.join('');
}
return reversedStr;
}
let lastInput: string | null = null;

View File

@@ -8,9 +8,13 @@ import { URI } from 'vs/base/common/uri';
import { ITextBufferFactory, ITextModel, ITextModelCreationOptions } from 'vs/editor/common/model';
import { ILanguageSelection } from 'vs/editor/common/services/modeService';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import { DocumentSemanticTokensProvider, DocumentRangeSemanticTokensProvider } from 'vs/editor/common/modes';
import { SemanticTokensProviderStyling } from 'vs/editor/common/services/semanticTokensProviderStyling';
export const IModelService = createDecorator<IModelService>('modelService');
export type DocumentTokensProvider = DocumentSemanticTokensProvider | DocumentRangeSemanticTokensProvider;
export interface IModelService {
_serviceBrand: undefined;
@@ -28,6 +32,8 @@ export interface IModelService {
getModel(resource: URI): ITextModel | null;
getSemanticTokensProviderStyling(provider: DocumentTokensProvider): SemanticTokensProviderStyling;
onModelAdded: Event<ITextModel>;
onModelRemoved: Event<ITextModel>;

View File

@@ -12,26 +12,26 @@ import { URI } from 'vs/base/common/uri';
import { EDITOR_MODEL_DEFAULTS } from 'vs/editor/common/config/editorOptions';
import { EditOperation } from 'vs/editor/common/core/editOperation';
import { Range } from 'vs/editor/common/core/range';
import { DefaultEndOfLine, EndOfLinePreference, EndOfLineSequence, IIdentifiedSingleEditOperation, ITextBuffer, ITextBufferFactory, ITextModel, ITextModelCreationOptions, IValidEditOperation } from 'vs/editor/common/model';
import { DefaultEndOfLine, EndOfLinePreference, EndOfLineSequence, IIdentifiedSingleEditOperation, ITextBuffer, ITextBufferFactory, ITextModel, ITextModelCreationOptions } from 'vs/editor/common/model';
import { TextModel, createTextBuffer } from 'vs/editor/common/model/textModel';
import { IModelLanguageChangedEvent, IModelContentChangedEvent } from 'vs/editor/common/model/textModelEvents';
import { LanguageIdentifier, DocumentSemanticTokensProviderRegistry, DocumentSemanticTokensProvider, SemanticTokensLegend, SemanticTokens, SemanticTokensEdits, TokenMetadata, FontStyle, MetadataConsts } from 'vs/editor/common/modes';
import { LanguageIdentifier, DocumentSemanticTokensProviderRegistry, DocumentSemanticTokensProvider, SemanticTokens, SemanticTokensEdits } from 'vs/editor/common/modes';
import { PLAINTEXT_LANGUAGE_IDENTIFIER } from 'vs/editor/common/modes/modesRegistry';
import { ILanguageSelection } from 'vs/editor/common/services/modeService';
import { IModelService } from 'vs/editor/common/services/modelService';
import { IModelService, DocumentTokensProvider } from 'vs/editor/common/services/modelService';
import { ITextResourcePropertiesService } from 'vs/editor/common/services/textResourceConfigurationService';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { RunOnceScheduler } from 'vs/base/common/async';
import { CancellationTokenSource } from 'vs/base/common/cancellation';
import { SparseEncodedTokens, MultilineTokens2 } from 'vs/editor/common/model/tokensStore';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { ILogService, LogLevel } from 'vs/platform/log/common/log';
import { ILogService } from 'vs/platform/log/common/log';
import { IUndoRedoService, IUndoRedoElement, IPastFutureElements } from 'vs/platform/undoRedo/common/undoRedo';
import { StringSHA1 } from 'vs/base/common/hash';
import { SingleModelEditStackElement, MultiModelEditStackElement, EditStackElement } from 'vs/editor/common/model/editStack';
import { IDialogService } from 'vs/platform/dialogs/common/dialogs';
import { Schemas } from 'vs/base/common/network';
import Severity from 'vs/base/common/severity';
import { SemanticTokensProviderStyling, toMultilineTokens2 } from 'vs/editor/common/services/semanticTokensProviderStyling';
export const MAINTAIN_UNDO_REDO_STACK = true;
@@ -171,6 +171,7 @@ export class ModelServiceImpl extends Disposable implements IModelService {
*/
private readonly _models: { [modelId: string]: ModelData; };
private readonly _disposedModels: Map<string, DisposedModelInfo>;
private readonly _semanticStyling: SemanticStyling;
constructor(
@IConfigurationService private readonly _configurationService: IConfigurationService,
@@ -184,11 +185,12 @@ export class ModelServiceImpl extends Disposable implements IModelService {
this._modelCreationOptionsByLanguageAndResource = Object.create(null);
this._models = {};
this._disposedModels = new Map<string, DisposedModelInfo>();
this._semanticStyling = this._register(new SemanticStyling(this._themeService, this._logService));
this._register(this._configurationService.onDidChangeConfiguration(e => this._updateModelOptions()));
this._register(this._configurationService.onDidChangeConfiguration(() => this._updateModelOptions()));
this._updateModelOptions();
this._register(new SemanticColoringFeature(this, this._themeService, this._configurationService, this._logService));
this._register(new SemanticColoringFeature(this, this._themeService, this._configurationService, this._semanticStyling));
}
private static _readModelOptions(config: IRawConfig, isForSimpleWidget: boolean): ITextModelCreationOptions {
@@ -380,7 +382,7 @@ export class ModelServiceImpl extends Disposable implements IModelService {
model.pushEditOperations(
[],
ModelServiceImpl._computeEdits(model, textBuffer),
(inverseEditOperations: IValidEditOperation[]) => []
() => []
);
model.pushStackElement();
}
@@ -542,6 +544,10 @@ export class ModelServiceImpl extends Disposable implements IModelService {
return modelData.model;
}
public getSemanticTokensProviderStyling(provider: DocumentTokensProvider): SemanticTokensProviderStyling {
return this._semanticStyling.get(provider);
}
// --- end IModelService
private _onWillDispose(model: ITextModel): void {
@@ -575,13 +581,13 @@ class SemanticColoringFeature extends Disposable {
private static readonly SETTING_ID = 'editor.semanticHighlighting';
private _watchers: Record<string, ModelSemanticColoring>;
private _semanticStyling: SemanticStyling;
private readonly _watchers: Record<string, ModelSemanticColoring>;
private readonly _semanticStyling: SemanticStyling;
constructor(modelService: IModelService, themeService: IThemeService, configurationService: IConfigurationService, logService: ILogService) {
constructor(modelService: IModelService, themeService: IThemeService, configurationService: IConfigurationService, semanticStyling: SemanticStyling) {
super();
this._watchers = Object.create(null);
this._semanticStyling = this._register(new SemanticStyling(themeService, logService));
this._semanticStyling = semanticStyling;
const isSemanticColoringEnabled = (model: ITextModel) => {
if (!themeService.getColorTheme().semanticHighlighting) {
@@ -633,204 +639,27 @@ class SemanticColoringFeature extends Disposable {
class SemanticStyling extends Disposable {
private _caches: WeakMap<DocumentSemanticTokensProvider, SemanticColoringProviderStyling>;
private _caches: WeakMap<DocumentTokensProvider, SemanticTokensProviderStyling>;
constructor(
private readonly _themeService: IThemeService,
private readonly _logService: ILogService
) {
super();
this._caches = new WeakMap<DocumentSemanticTokensProvider, SemanticColoringProviderStyling>();
this._caches = new WeakMap<DocumentTokensProvider, SemanticTokensProviderStyling>();
this._register(this._themeService.onDidColorThemeChange(() => {
this._caches = new WeakMap<DocumentSemanticTokensProvider, SemanticColoringProviderStyling>();
this._caches = new WeakMap<DocumentTokensProvider, SemanticTokensProviderStyling>();
}));
}
public get(provider: DocumentSemanticTokensProvider): SemanticColoringProviderStyling {
public get(provider: DocumentTokensProvider): SemanticTokensProviderStyling {
if (!this._caches.has(provider)) {
this._caches.set(provider, new SemanticColoringProviderStyling(provider.getLegend(), this._themeService, this._logService));
this._caches.set(provider, new SemanticTokensProviderStyling(provider.getLegend(), this._themeService, this._logService));
}
return this._caches.get(provider)!;
}
}
const enum Constants {
NO_STYLING = 0b01111111111111111111111111111111
}
class HashTableEntry {
public readonly tokenTypeIndex: number;
public readonly tokenModifierSet: number;
public readonly languageId: number;
public readonly metadata: number;
public next: HashTableEntry | null;
constructor(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number) {
this.tokenTypeIndex = tokenTypeIndex;
this.tokenModifierSet = tokenModifierSet;
this.languageId = languageId;
this.metadata = metadata;
this.next = null;
}
}
class HashTable {
private static _SIZES = [3, 7, 13, 31, 61, 127, 251, 509, 1021, 2039, 4093, 8191, 16381, 32749, 65521, 131071, 262139, 524287, 1048573, 2097143];
private _elementsCount: number;
private _currentLengthIndex: number;
private _currentLength: number;
private _growCount: number;
private _elements: (HashTableEntry | null)[];
constructor() {
this._elementsCount = 0;
this._currentLengthIndex = 0;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);
}
private static _nullOutEntries(entries: (HashTableEntry | null)[], length: number): void {
for (let i = 0; i < length; i++) {
entries[i] = null;
}
}
private _hashFunc(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): number {
const hash = (n1: number, n2: number) => (((n1 << 5) - n1) + n2) | 0; // n1 * 31 + n2, keep as int32
return hash(hash(tokenTypeIndex, tokenModifierSet), languageId) % this._currentLength;
}
public get(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): HashTableEntry | null {
const hash = this._hashFunc(tokenTypeIndex, tokenModifierSet, languageId);
let p = this._elements[hash];
while (p) {
if (p.tokenTypeIndex === tokenTypeIndex && p.tokenModifierSet === tokenModifierSet && p.languageId === languageId) {
return p;
}
p = p.next;
}
return null;
}
public add(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number): void {
this._elementsCount++;
if (this._growCount !== 0 && this._elementsCount >= this._growCount) {
// expand!
const oldElements = this._elements;
this._currentLengthIndex++;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);
for (const first of oldElements) {
let p = first;
while (p) {
const oldNext = p.next;
p.next = null;
this._add(p);
p = oldNext;
}
}
}
this._add(new HashTableEntry(tokenTypeIndex, tokenModifierSet, languageId, metadata));
}
private _add(element: HashTableEntry): void {
const hash = this._hashFunc(element.tokenTypeIndex, element.tokenModifierSet, element.languageId);
element.next = this._elements[hash];
this._elements[hash] = element;
}
}
class SemanticColoringProviderStyling {
private readonly _hashTable: HashTable;
constructor(
private readonly _legend: SemanticTokensLegend,
private readonly _themeService: IThemeService,
private readonly _logService: ILogService
) {
this._hashTable = new HashTable();
}
public getMetadata(tokenTypeIndex: number, tokenModifierSet: number, languageId: LanguageIdentifier): number {
const entry = this._hashTable.get(tokenTypeIndex, tokenModifierSet, languageId.id);
let metadata: number;
if (entry) {
metadata = entry.metadata;
} else {
const tokenType = this._legend.tokenTypes[tokenTypeIndex];
const tokenModifiers: string[] = [];
let modifierSet = tokenModifierSet;
for (let modifierIndex = 0; modifierSet > 0 && modifierIndex < this._legend.tokenModifiers.length; modifierIndex++) {
if (modifierSet & 1) {
tokenModifiers.push(this._legend.tokenModifiers[modifierIndex]);
}
modifierSet = modifierSet >> 1;
}
const tokenStyle = this._themeService.getColorTheme().getTokenStyleMetadata(tokenType, tokenModifiers, languageId.language);
if (typeof tokenStyle === 'undefined') {
metadata = Constants.NO_STYLING;
} else {
metadata = 0;
if (typeof tokenStyle.italic !== 'undefined') {
const italicBit = (tokenStyle.italic ? FontStyle.Italic : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= italicBit | MetadataConsts.SEMANTIC_USE_ITALIC;
}
if (typeof tokenStyle.bold !== 'undefined') {
const boldBit = (tokenStyle.bold ? FontStyle.Bold : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= boldBit | MetadataConsts.SEMANTIC_USE_BOLD;
}
if (typeof tokenStyle.underline !== 'undefined') {
const underlineBit = (tokenStyle.underline ? FontStyle.Underline : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= underlineBit | MetadataConsts.SEMANTIC_USE_UNDERLINE;
}
if (tokenStyle.foreground) {
const foregroundBits = (tokenStyle.foreground) << MetadataConsts.FOREGROUND_OFFSET;
metadata |= foregroundBits | MetadataConsts.SEMANTIC_USE_FOREGROUND;
}
if (metadata === 0) {
// Nothing!
metadata = Constants.NO_STYLING;
}
}
this._hashTable.add(tokenTypeIndex, tokenModifierSet, languageId.id, metadata);
}
if (this._logService.getLevel() === LogLevel.Trace) {
const type = this._legend.tokenTypes[tokenTypeIndex];
const modifiers = tokenModifierSet ? ' ' + this._legend.tokenModifiers.filter((_, i) => tokenModifierSet & (1 << i)).join(' ') : '';
this._logService.trace(`tokenStyleMetadata ${entry ? '[CACHED] ' : ''}${type}${modifiers}: foreground ${TokenMetadata.getForeground(metadata)}, fontStyle ${TokenMetadata.getFontStyle(metadata).toString(2)}`);
}
return metadata;
}
}
const enum SemanticColoringConstants {
/**
* Let's aim at having 8KB buffers if possible...
* So that would be 8192 / (5 * 4) = 409.6 tokens per area
*/
DesiredTokensPerArea = 400,
/**
* Try to keep the total number of areas under 1024 if possible,
* simply compensate by having more tokens per area...
*/
DesiredMaxAreas = 1024,
}
class SemanticTokensResponse {
constructor(
private readonly _provider: DocumentSemanticTokensProvider,
@@ -848,10 +677,10 @@ class ModelSemanticColoring extends Disposable {
private _isDisposed: boolean;
private readonly _model: ITextModel;
private readonly _semanticStyling: SemanticStyling;
private readonly _fetchSemanticTokens: RunOnceScheduler;
private _currentResponse: SemanticTokensResponse | null;
private _currentRequestCancellationTokenSource: CancellationTokenSource | null;
private _providersChangeListeners: IDisposable[];
private readonly _fetchDocumentSemanticTokens: RunOnceScheduler;
private _currentDocumentResponse: SemanticTokensResponse | null;
private _currentDocumentRequestCancellationTokenSource: CancellationTokenSource | null;
private _documentProvidersChangeListeners: IDisposable[];
constructor(model: ITextModel, themeService: IThemeService, stylingProvider: SemanticStyling) {
super();
@@ -859,57 +688,57 @@ class ModelSemanticColoring extends Disposable {
this._isDisposed = false;
this._model = model;
this._semanticStyling = stylingProvider;
this._fetchSemanticTokens = this._register(new RunOnceScheduler(() => this._fetchSemanticTokensNow(), 300));
this._currentResponse = null;
this._currentRequestCancellationTokenSource = null;
this._providersChangeListeners = [];
this._fetchDocumentSemanticTokens = this._register(new RunOnceScheduler(() => this._fetchDocumentSemanticTokensNow(), 300));
this._currentDocumentResponse = null;
this._currentDocumentRequestCancellationTokenSource = null;
this._documentProvidersChangeListeners = [];
this._register(this._model.onDidChangeContent(e => {
if (!this._fetchSemanticTokens.isScheduled()) {
this._fetchSemanticTokens.schedule();
this._register(this._model.onDidChangeContent(() => {
if (!this._fetchDocumentSemanticTokens.isScheduled()) {
this._fetchDocumentSemanticTokens.schedule();
}
}));
const bindChangeListeners = () => {
dispose(this._providersChangeListeners);
this._providersChangeListeners = [];
const bindDocumentChangeListeners = () => {
dispose(this._documentProvidersChangeListeners);
this._documentProvidersChangeListeners = [];
for (const provider of DocumentSemanticTokensProviderRegistry.all(model)) {
if (typeof provider.onDidChange === 'function') {
this._providersChangeListeners.push(provider.onDidChange(() => this._fetchSemanticTokens.schedule(0)));
this._documentProvidersChangeListeners.push(provider.onDidChange(() => this._fetchDocumentSemanticTokens.schedule(0)));
}
}
};
bindChangeListeners();
this._register(DocumentSemanticTokensProviderRegistry.onDidChange(e => {
bindChangeListeners();
this._fetchSemanticTokens.schedule();
bindDocumentChangeListeners();
this._register(DocumentSemanticTokensProviderRegistry.onDidChange(() => {
bindDocumentChangeListeners();
this._fetchDocumentSemanticTokens.schedule();
}));
this._register(themeService.onDidColorThemeChange(_ => {
// clear out existing tokens
this._setSemanticTokens(null, null, null, []);
this._fetchSemanticTokens.schedule();
this._setDocumentSemanticTokens(null, null, null, []);
this._fetchDocumentSemanticTokens.schedule();
}));
this._fetchSemanticTokens.schedule(0);
this._fetchDocumentSemanticTokens.schedule(0);
}
public dispose(): void {
if (this._currentResponse) {
this._currentResponse.dispose();
this._currentResponse = null;
if (this._currentDocumentResponse) {
this._currentDocumentResponse.dispose();
this._currentDocumentResponse = null;
}
if (this._currentRequestCancellationTokenSource) {
this._currentRequestCancellationTokenSource.cancel();
this._currentRequestCancellationTokenSource = null;
if (this._currentDocumentRequestCancellationTokenSource) {
this._currentDocumentRequestCancellationTokenSource.cancel();
this._currentDocumentRequestCancellationTokenSource = null;
}
this._setSemanticTokens(null, null, null, []);
this._setDocumentSemanticTokens(null, null, null, []);
this._isDisposed = true;
super.dispose();
}
private _fetchSemanticTokensNow(): void {
if (this._currentRequestCancellationTokenSource) {
private _fetchDocumentSemanticTokensNow(): void {
if (this._currentDocumentRequestCancellationTokenSource) {
// there is already a request running, let it finish...
return;
}
@@ -917,7 +746,7 @@ class ModelSemanticColoring extends Disposable {
if (!provider) {
return;
}
this._currentRequestCancellationTokenSource = new CancellationTokenSource();
this._currentDocumentRequestCancellationTokenSource = new CancellationTokenSource();
const pendingChanges: IModelContentChangedEvent[] = [];
const contentChangeListener = this._model.onDidChangeContent((e) => {
@@ -926,13 +755,13 @@ class ModelSemanticColoring extends Disposable {
const styling = this._semanticStyling.get(provider);
const lastResultId = this._currentResponse ? this._currentResponse.resultId || null : null;
const request = Promise.resolve(provider.provideDocumentSemanticTokens(this._model, lastResultId, this._currentRequestCancellationTokenSource.token));
const lastResultId = this._currentDocumentResponse ? this._currentDocumentResponse.resultId || null : null;
const request = Promise.resolve(provider.provideDocumentSemanticTokens(this._model, lastResultId, this._currentDocumentRequestCancellationTokenSource.token));
request.then((res) => {
this._currentRequestCancellationTokenSource = null;
this._currentDocumentRequestCancellationTokenSource = null;
contentChangeListener.dispose();
this._setSemanticTokens(provider, res || null, styling, pendingChanges);
this._setDocumentSemanticTokens(provider, res || null, styling, pendingChanges);
}, (err) => {
if (!err || typeof err.message !== 'string' || err.message.indexOf('busy') === -1) {
errors.onUnexpectedError(err);
@@ -940,13 +769,13 @@ class ModelSemanticColoring extends Disposable {
// Semantic tokens eat up all errors and consider them to mean that the result is temporarily not available
// The API does not have a special error kind to express this...
this._currentRequestCancellationTokenSource = null;
this._currentDocumentRequestCancellationTokenSource = null;
contentChangeListener.dispose();
if (pendingChanges.length > 0) {
// More changes occurred while the request was running
if (!this._fetchSemanticTokens.isScheduled()) {
this._fetchSemanticTokens.schedule();
if (!this._fetchDocumentSemanticTokens.isScheduled()) {
this._fetchDocumentSemanticTokens.schedule();
}
}
});
@@ -966,11 +795,11 @@ class ModelSemanticColoring extends Disposable {
}
}
private _setSemanticTokens(provider: DocumentSemanticTokensProvider | null, tokens: SemanticTokens | SemanticTokensEdits | null, styling: SemanticColoringProviderStyling | null, pendingChanges: IModelContentChangedEvent[]): void {
const currentResponse = this._currentResponse;
if (this._currentResponse) {
this._currentResponse.dispose();
this._currentResponse = null;
private _setDocumentSemanticTokens(provider: DocumentSemanticTokensProvider | null, tokens: SemanticTokens | SemanticTokensEdits | null, styling: SemanticTokensProviderStyling | null, pendingChanges: IModelContentChangedEvent[]): void {
const currentResponse = this._currentDocumentResponse;
if (this._currentDocumentResponse) {
this._currentDocumentResponse.dispose();
this._currentDocumentResponse = null;
}
if (this._isDisposed) {
// disposed!
@@ -979,15 +808,19 @@ class ModelSemanticColoring extends Disposable {
}
return;
}
if (!provider || !tokens || !styling) {
this._model.setSemanticTokens(null);
if (!provider || !styling) {
this._model.setSemanticTokens(null, false);
return;
}
if (!tokens) {
this._model.setSemanticTokens(null, true);
return;
}
if (ModelSemanticColoring._isSemanticTokensEdits(tokens)) {
if (!currentResponse) {
// not possible!
this._model.setSemanticTokens(null);
this._model.setSemanticTokens(null, true);
return;
}
if (tokens.edits.length === 0) {
@@ -1037,80 +870,9 @@ class ModelSemanticColoring extends Disposable {
if (ModelSemanticColoring._isSemanticTokens(tokens)) {
this._currentResponse = new SemanticTokensResponse(provider, tokens.resultId, tokens.data);
this._currentDocumentResponse = new SemanticTokensResponse(provider, tokens.resultId, tokens.data);
const srcData = tokens.data;
const tokenCount = (tokens.data.length / 5) | 0;
const tokensPerArea = Math.max(Math.ceil(tokenCount / SemanticColoringConstants.DesiredMaxAreas), SemanticColoringConstants.DesiredTokensPerArea);
const result: MultilineTokens2[] = [];
const languageId = this._model.getLanguageIdentifier();
let tokenIndex = 0;
let lastLineNumber = 1;
let lastStartCharacter = 0;
while (tokenIndex < tokenCount) {
const tokenStartIndex = tokenIndex;
let tokenEndIndex = Math.min(tokenStartIndex + tokensPerArea, tokenCount);
// Keep tokens on the same line in the same area...
if (tokenEndIndex < tokenCount) {
let smallTokenEndIndex = tokenEndIndex;
while (smallTokenEndIndex - 1 > tokenStartIndex && srcData[5 * smallTokenEndIndex] === 0) {
smallTokenEndIndex--;
}
if (smallTokenEndIndex - 1 === tokenStartIndex) {
// there are so many tokens on this line that our area would be empty, we must now go right
let bigTokenEndIndex = tokenEndIndex;
while (bigTokenEndIndex + 1 < tokenCount && srcData[5 * bigTokenEndIndex] === 0) {
bigTokenEndIndex++;
}
tokenEndIndex = bigTokenEndIndex;
} else {
tokenEndIndex = smallTokenEndIndex;
}
}
let destData = new Uint32Array((tokenEndIndex - tokenStartIndex) * 4);
let destOffset = 0;
let areaLine = 0;
while (tokenIndex < tokenEndIndex) {
const srcOffset = 5 * tokenIndex;
const deltaLine = srcData[srcOffset];
const deltaCharacter = srcData[srcOffset + 1];
const lineNumber = lastLineNumber + deltaLine;
const startCharacter = (deltaLine === 0 ? lastStartCharacter + deltaCharacter : deltaCharacter);
const length = srcData[srcOffset + 2];
const tokenTypeIndex = srcData[srcOffset + 3];
const tokenModifierSet = srcData[srcOffset + 4];
const metadata = styling.getMetadata(tokenTypeIndex, tokenModifierSet, languageId);
if (metadata !== Constants.NO_STYLING) {
if (areaLine === 0) {
areaLine = lineNumber;
}
destData[destOffset] = lineNumber - areaLine;
destData[destOffset + 1] = startCharacter;
destData[destOffset + 2] = startCharacter + length;
destData[destOffset + 3] = metadata;
destOffset += 4;
}
lastLineNumber = lineNumber;
lastStartCharacter = startCharacter;
tokenIndex++;
}
if (destOffset !== destData.length) {
destData = destData.subarray(0, destOffset);
}
const tokens = new MultilineTokens2(areaLine, new SparseEncodedTokens(destData));
result.push(tokens);
}
const result = toMultilineTokens2(tokens, styling, this._model.getLanguageIdentifier());
// Adjust incoming semantic tokens
if (pendingChanges.length > 0) {
@@ -1126,16 +888,16 @@ class ModelSemanticColoring extends Disposable {
}
}
if (!this._fetchSemanticTokens.isScheduled()) {
this._fetchSemanticTokens.schedule();
if (!this._fetchDocumentSemanticTokens.isScheduled()) {
this._fetchDocumentSemanticTokens.schedule();
}
}
this._model.setSemanticTokens(result);
this._model.setSemanticTokens(result, true);
return;
}
this._model.setSemanticTokens(null);
this._model.setSemanticTokens(null, true);
}
private _getSemanticColoringProvider(): DocumentSemanticTokensProvider | null {

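The renamed fetch loop keeps the same shape as before: debounce on content changes, allow a single in-flight request, and re-schedule if edits arrived while the request was running. Below is a stripped-down sketch of that shape, with an invented _provide() placeholder standing in for provideDocumentSemanticTokens.

import { RunOnceScheduler } from 'vs/base/common/async';
import { CancellationToken, CancellationTokenSource } from 'vs/base/common/cancellation';

class DocumentTokenFetchSketch {
	private readonly _scheduler = new RunOnceScheduler(() => this._fetchNow(), 300);
	private _cts: CancellationTokenSource | null = null;
	private _pendingChanges = 0;

	public onContentChanged(): void {
		this._pendingChanges++;
		if (!this._scheduler.isScheduled()) {
			this._scheduler.schedule();
		}
	}

	private _fetchNow(): void {
		if (this._cts) {
			// a request is already running, let it finish
			return;
		}
		this._cts = new CancellationTokenSource();
		this._pendingChanges = 0;
		this._provide(this._cts.token).then(() => {
			this._cts = null;
			if (this._pendingChanges > 0 && !this._scheduler.isScheduled()) {
				// more changes arrived while the request was running
				this._scheduler.schedule();
			}
		});
	}

	// placeholder for provider.provideDocumentSemanticTokens(model, lastResultId, token)
	private _provide(_token: CancellationToken): Promise<void> {
		return Promise.resolve();
	}
}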
View File

@@ -0,0 +1,261 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { SemanticTokensLegend, TokenMetadata, FontStyle, MetadataConsts, SemanticTokens, LanguageIdentifier } from 'vs/editor/common/modes';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { ILogService, LogLevel } from 'vs/platform/log/common/log';
import { MultilineTokens2, SparseEncodedTokens } from 'vs/editor/common/model/tokensStore';
export const enum SemanticTokensProviderStylingConstants {
NO_STYLING = 0b01111111111111111111111111111111
}
export class SemanticTokensProviderStyling {
private readonly _hashTable: HashTable;
constructor(
private readonly _legend: SemanticTokensLegend,
private readonly _themeService: IThemeService,
private readonly _logService: ILogService
) {
this._hashTable = new HashTable();
}
public getMetadata(tokenTypeIndex: number, tokenModifierSet: number, languageId: LanguageIdentifier): number {
const entry = this._hashTable.get(tokenTypeIndex, tokenModifierSet, languageId.id);
let metadata: number;
if (entry) {
metadata = entry.metadata;
} else {
const tokenType = this._legend.tokenTypes[tokenTypeIndex];
const tokenModifiers: string[] = [];
let modifierSet = tokenModifierSet;
for (let modifierIndex = 0; modifierSet > 0 && modifierIndex < this._legend.tokenModifiers.length; modifierIndex++) {
if (modifierSet & 1) {
tokenModifiers.push(this._legend.tokenModifiers[modifierIndex]);
}
modifierSet = modifierSet >> 1;
}
const tokenStyle = this._themeService.getColorTheme().getTokenStyleMetadata(tokenType, tokenModifiers, languageId.language);
if (typeof tokenStyle === 'undefined') {
metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
} else {
metadata = 0;
if (typeof tokenStyle.italic !== 'undefined') {
const italicBit = (tokenStyle.italic ? FontStyle.Italic : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= italicBit | MetadataConsts.SEMANTIC_USE_ITALIC;
}
if (typeof tokenStyle.bold !== 'undefined') {
const boldBit = (tokenStyle.bold ? FontStyle.Bold : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= boldBit | MetadataConsts.SEMANTIC_USE_BOLD;
}
if (typeof tokenStyle.underline !== 'undefined') {
const underlineBit = (tokenStyle.underline ? FontStyle.Underline : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= underlineBit | MetadataConsts.SEMANTIC_USE_UNDERLINE;
}
if (tokenStyle.foreground) {
const foregroundBits = (tokenStyle.foreground) << MetadataConsts.FOREGROUND_OFFSET;
metadata |= foregroundBits | MetadataConsts.SEMANTIC_USE_FOREGROUND;
}
if (metadata === 0) {
// Nothing!
metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
}
}
this._hashTable.add(tokenTypeIndex, tokenModifierSet, languageId.id, metadata);
}
if (this._logService.getLevel() === LogLevel.Trace) {
const type = this._legend.tokenTypes[tokenTypeIndex];
const modifiers = tokenModifierSet ? ' ' + this._legend.tokenModifiers.filter((_, i) => tokenModifierSet & (1 << i)).join(' ') : '';
this._logService.trace(`tokenStyleMetadata ${entry ? '[CACHED] ' : ''}${type}${modifiers}: foreground ${TokenMetadata.getForeground(metadata)}, fontStyle ${TokenMetadata.getFontStyle(metadata).toString(2)}`);
}
return metadata;
}
}
const enum SemanticColoringConstants {
/**
* Let's aim at having 8KB buffers if possible...
* So that would be 8192 / (5 * 4) = 409.6 tokens per area
*/
DesiredTokensPerArea = 400,
/**
* Try to keep the total number of areas under 1024 if possible,
* simply compensate by having more tokens per area...
*/
DesiredMaxAreas = 1024,
}
export function toMultilineTokens2(tokens: SemanticTokens, styling: SemanticTokensProviderStyling, languageId: LanguageIdentifier): MultilineTokens2[] {
const srcData = tokens.data;
const tokenCount = (tokens.data.length / 5) | 0;
const tokensPerArea = Math.max(Math.ceil(tokenCount / SemanticColoringConstants.DesiredMaxAreas), SemanticColoringConstants.DesiredTokensPerArea);
const result: MultilineTokens2[] = [];
let tokenIndex = 0;
let lastLineNumber = 1;
let lastStartCharacter = 0;
while (tokenIndex < tokenCount) {
const tokenStartIndex = tokenIndex;
let tokenEndIndex = Math.min(tokenStartIndex + tokensPerArea, tokenCount);
// Keep tokens on the same line in the same area...
if (tokenEndIndex < tokenCount) {
let smallTokenEndIndex = tokenEndIndex;
while (smallTokenEndIndex - 1 > tokenStartIndex && srcData[5 * smallTokenEndIndex] === 0) {
smallTokenEndIndex--;
}
if (smallTokenEndIndex - 1 === tokenStartIndex) {
// there are so many tokens on this line that our area would be empty, we must now go right
let bigTokenEndIndex = tokenEndIndex;
while (bigTokenEndIndex + 1 < tokenCount && srcData[5 * bigTokenEndIndex] === 0) {
bigTokenEndIndex++;
}
tokenEndIndex = bigTokenEndIndex;
} else {
tokenEndIndex = smallTokenEndIndex;
}
}
let destData = new Uint32Array((tokenEndIndex - tokenStartIndex) * 4);
let destOffset = 0;
let areaLine = 0;
while (tokenIndex < tokenEndIndex) {
const srcOffset = 5 * tokenIndex;
const deltaLine = srcData[srcOffset];
const deltaCharacter = srcData[srcOffset + 1];
const lineNumber = lastLineNumber + deltaLine;
const startCharacter = (deltaLine === 0 ? lastStartCharacter + deltaCharacter : deltaCharacter);
const length = srcData[srcOffset + 2];
const tokenTypeIndex = srcData[srcOffset + 3];
const tokenModifierSet = srcData[srcOffset + 4];
const metadata = styling.getMetadata(tokenTypeIndex, tokenModifierSet, languageId);
if (metadata !== SemanticTokensProviderStylingConstants.NO_STYLING) {
if (areaLine === 0) {
areaLine = lineNumber;
}
destData[destOffset] = lineNumber - areaLine;
destData[destOffset + 1] = startCharacter;
destData[destOffset + 2] = startCharacter + length;
destData[destOffset + 3] = metadata;
destOffset += 4;
}
lastLineNumber = lineNumber;
lastStartCharacter = startCharacter;
tokenIndex++;
}
if (destOffset !== destData.length) {
destData = destData.subarray(0, destOffset);
}
const tokens = new MultilineTokens2(areaLine, new SparseEncodedTokens(destData));
result.push(tokens);
}
return result;
}
class HashTableEntry {
public readonly tokenTypeIndex: number;
public readonly tokenModifierSet: number;
public readonly languageId: number;
public readonly metadata: number;
public next: HashTableEntry | null;
constructor(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number) {
this.tokenTypeIndex = tokenTypeIndex;
this.tokenModifierSet = tokenModifierSet;
this.languageId = languageId;
this.metadata = metadata;
this.next = null;
}
}
class HashTable {
private static _SIZES = [3, 7, 13, 31, 61, 127, 251, 509, 1021, 2039, 4093, 8191, 16381, 32749, 65521, 131071, 262139, 524287, 1048573, 2097143];
private _elementsCount: number;
private _currentLengthIndex: number;
private _currentLength: number;
private _growCount: number;
private _elements: (HashTableEntry | null)[];
constructor() {
this._elementsCount = 0;
this._currentLengthIndex = 0;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);
}
private static _nullOutEntries(entries: (HashTableEntry | null)[], length: number): void {
for (let i = 0; i < length; i++) {
entries[i] = null;
}
}
private _hash2(n1: number, n2: number): number {
return (((n1 << 5) - n1) + n2) | 0; // n1 * 31 + n2, keep as int32
}
private _hashFunc(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): number {
return this._hash2(this._hash2(tokenTypeIndex, tokenModifierSet), languageId) % this._currentLength;
}
public get(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): HashTableEntry | null {
const hash = this._hashFunc(tokenTypeIndex, tokenModifierSet, languageId);
let p = this._elements[hash];
while (p) {
if (p.tokenTypeIndex === tokenTypeIndex && p.tokenModifierSet === tokenModifierSet && p.languageId === languageId) {
return p;
}
p = p.next;
}
return null;
}
public add(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number): void {
this._elementsCount++;
if (this._growCount !== 0 && this._elementsCount >= this._growCount) {
// expand!
const oldElements = this._elements;
this._currentLengthIndex++;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);
for (const first of oldElements) {
let p = first;
while (p) {
const oldNext = p.next;
p.next = null;
this._add(p);
p = oldNext;
}
}
}
this._add(new HashTableEntry(tokenTypeIndex, tokenModifierSet, languageId, metadata));
}
private _add(element: HashTableEntry): void {
const hash = this._hashFunc(element.tokenTypeIndex, element.tokenModifierSet, element.languageId);
element.next = this._elements[hash];
this._elements[hash] = element;
}
}
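As a worked illustration of the modifier loop in getMetadata above: each set bit in tokenModifierSet selects the modifier name at the same index of the legend. A standalone sketch:

// Sketch: decode a modifier bitset against a legend's modifier names.
function decodeModifiers(tokenModifierSet: number, legendModifiers: string[]): string[] {
	const result: string[] = [];
	let modifierSet = tokenModifierSet;
	for (let i = 0; modifierSet > 0 && i < legendModifiers.length; i++) {
		if (modifierSet & 1) {
			result.push(legendModifiers[i]);
		}
		modifierSet = modifierSet >> 1;
	}
	return result;
}
// e.g. decodeModifiers(0b101, ['declaration', 'static', 'readonly']) -> ['declaration', 'readonly']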

View File

@@ -813,7 +813,11 @@ function _renderLine(input: ResolvedRenderLineInput, sb: IStringBuilder): Render
let prevPartContentCnt = 0;
let partAbsoluteOffset = 0;
sb.appendASCIIString('<span>');
if (containsRTL) {
sb.appendASCIIString('<span dir="ltr">');
} else {
sb.appendASCIIString('<span>');
}
for (let partIndex = 0, tokensLen = parts.length; partIndex < tokensLen; partIndex++) {
partAbsoluteOffset += prevPartContentCnt;
@@ -890,9 +894,6 @@ function _renderLine(input: ResolvedRenderLineInput, sb: IStringBuilder): Render
let partContentCnt = 0;
if (containsRTL) {
sb.appendASCIIString(' dir="ltr"');
}
sb.appendASCII(CharCode.GreaterThan);
for (; charIndex < partEndIndex; charIndex++) {
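The two hunks above hoist dir="ltr" from every token span onto a single outer wrapper when the line contains RTL text. A rough sketch of the resulting markup shape (not the renderer's actual string building):

// Sketch: wrap the whole rendered line once instead of marking each token span.
function wrapRenderedLine(tokenSpansHtml: string, containsRTL: boolean): string {
	const open = containsRTL ? '<span dir="ltr">' : '<span>';
	return open + tokenSpansHtml + '</span>';
}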