Merge from vscode 2f984aad710215f4e4684a035bb02f55d1a9e2cc (#9819)

@@ -8,9 +8,13 @@ import { URI } from 'vs/base/common/uri';
import { ITextBufferFactory, ITextModel, ITextModelCreationOptions } from 'vs/editor/common/model';
import { ILanguageSelection } from 'vs/editor/common/services/modeService';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
import { DocumentSemanticTokensProvider, DocumentRangeSemanticTokensProvider } from 'vs/editor/common/modes';
import { SemanticTokensProviderStyling } from 'vs/editor/common/services/semanticTokensProviderStyling';

export const IModelService = createDecorator<IModelService>('modelService');

export type DocumentTokensProvider = DocumentSemanticTokensProvider | DocumentRangeSemanticTokensProvider;

export interface IModelService {
_serviceBrand: undefined;

@@ -28,6 +32,8 @@ export interface IModelService {

getModel(resource: URI): ITextModel | null;

getSemanticTokensProviderStyling(provider: DocumentTokensProvider): SemanticTokensProviderStyling;

onModelAdded: Event<ITextModel>;

onModelRemoved: Event<ITextModel>;
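
A minimal consumer-side sketch (not part of the diff): how the new getSemanticTokensProviderStyling method and the DocumentTokensProvider union might be used; the helper name and the way the provider is obtained are assumptions for illustration.

// Illustrative only: assumes a provider instance is already at hand.
function lookupStyling(modelService: IModelService, provider: DocumentTokensProvider): SemanticTokensProviderStyling {
    // The service keeps one styling object per provider, so repeated calls return the cached instance.
    return modelService.getSemanticTokensProviderStyling(provider);
}
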
@@ -12,26 +12,26 @@ import { URI } from 'vs/base/common/uri';
import { EDITOR_MODEL_DEFAULTS } from 'vs/editor/common/config/editorOptions';
import { EditOperation } from 'vs/editor/common/core/editOperation';
import { Range } from 'vs/editor/common/core/range';
import { DefaultEndOfLine, EndOfLinePreference, EndOfLineSequence, IIdentifiedSingleEditOperation, ITextBuffer, ITextBufferFactory, ITextModel, ITextModelCreationOptions, IValidEditOperation } from 'vs/editor/common/model';
import { DefaultEndOfLine, EndOfLinePreference, EndOfLineSequence, IIdentifiedSingleEditOperation, ITextBuffer, ITextBufferFactory, ITextModel, ITextModelCreationOptions } from 'vs/editor/common/model';
import { TextModel, createTextBuffer } from 'vs/editor/common/model/textModel';
import { IModelLanguageChangedEvent, IModelContentChangedEvent } from 'vs/editor/common/model/textModelEvents';
import { LanguageIdentifier, DocumentSemanticTokensProviderRegistry, DocumentSemanticTokensProvider, SemanticTokensLegend, SemanticTokens, SemanticTokensEdits, TokenMetadata, FontStyle, MetadataConsts } from 'vs/editor/common/modes';
import { LanguageIdentifier, DocumentSemanticTokensProviderRegistry, DocumentSemanticTokensProvider, SemanticTokens, SemanticTokensEdits } from 'vs/editor/common/modes';
import { PLAINTEXT_LANGUAGE_IDENTIFIER } from 'vs/editor/common/modes/modesRegistry';
import { ILanguageSelection } from 'vs/editor/common/services/modeService';
import { IModelService } from 'vs/editor/common/services/modelService';
import { IModelService, DocumentTokensProvider } from 'vs/editor/common/services/modelService';
import { ITextResourcePropertiesService } from 'vs/editor/common/services/textResourceConfigurationService';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { RunOnceScheduler } from 'vs/base/common/async';
import { CancellationTokenSource } from 'vs/base/common/cancellation';
import { SparseEncodedTokens, MultilineTokens2 } from 'vs/editor/common/model/tokensStore';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { ILogService, LogLevel } from 'vs/platform/log/common/log';
import { ILogService } from 'vs/platform/log/common/log';
import { IUndoRedoService, IUndoRedoElement, IPastFutureElements } from 'vs/platform/undoRedo/common/undoRedo';
import { StringSHA1 } from 'vs/base/common/hash';
import { SingleModelEditStackElement, MultiModelEditStackElement, EditStackElement } from 'vs/editor/common/model/editStack';
import { IDialogService } from 'vs/platform/dialogs/common/dialogs';
import { Schemas } from 'vs/base/common/network';
import Severity from 'vs/base/common/severity';
import { SemanticTokensProviderStyling, toMultilineTokens2 } from 'vs/editor/common/services/semanticTokensProviderStyling';

export const MAINTAIN_UNDO_REDO_STACK = true;

@@ -171,6 +171,7 @@ export class ModelServiceImpl extends Disposable implements IModelService {
*/
private readonly _models: { [modelId: string]: ModelData; };
private readonly _disposedModels: Map<string, DisposedModelInfo>;
private readonly _semanticStyling: SemanticStyling;

constructor(
@IConfigurationService private readonly _configurationService: IConfigurationService,
@@ -184,11 +185,12 @@ export class ModelServiceImpl extends Disposable implements IModelService {
this._modelCreationOptionsByLanguageAndResource = Object.create(null);
this._models = {};
this._disposedModels = new Map<string, DisposedModelInfo>();
this._semanticStyling = this._register(new SemanticStyling(this._themeService, this._logService));

this._register(this._configurationService.onDidChangeConfiguration(e => this._updateModelOptions()));
this._register(this._configurationService.onDidChangeConfiguration(() => this._updateModelOptions()));
this._updateModelOptions();

this._register(new SemanticColoringFeature(this, this._themeService, this._configurationService, this._logService));
this._register(new SemanticColoringFeature(this, this._themeService, this._configurationService, this._semanticStyling));
}

private static _readModelOptions(config: IRawConfig, isForSimpleWidget: boolean): ITextModelCreationOptions {
@@ -380,7 +382,7 @@ export class ModelServiceImpl extends Disposable implements IModelService {
model.pushEditOperations(
[],
ModelServiceImpl._computeEdits(model, textBuffer),
(inverseEditOperations: IValidEditOperation[]) => []
() => []
);
model.pushStackElement();
}
@@ -542,6 +544,10 @@ export class ModelServiceImpl extends Disposable implements IModelService {
return modelData.model;
}

public getSemanticTokensProviderStyling(provider: DocumentTokensProvider): SemanticTokensProviderStyling {
return this._semanticStyling.get(provider);
}

// --- end IModelService

private _onWillDispose(model: ITextModel): void {
@@ -575,13 +581,13 @@ class SemanticColoringFeature extends Disposable {

private static readonly SETTING_ID = 'editor.semanticHighlighting';

private _watchers: Record<string, ModelSemanticColoring>;
private _semanticStyling: SemanticStyling;
private readonly _watchers: Record<string, ModelSemanticColoring>;
private readonly _semanticStyling: SemanticStyling;

constructor(modelService: IModelService, themeService: IThemeService, configurationService: IConfigurationService, logService: ILogService) {
constructor(modelService: IModelService, themeService: IThemeService, configurationService: IConfigurationService, semanticStyling: SemanticStyling) {
super();
this._watchers = Object.create(null);
this._semanticStyling = this._register(new SemanticStyling(themeService, logService));
this._semanticStyling = semanticStyling;

const isSemanticColoringEnabled = (model: ITextModel) => {
if (!themeService.getColorTheme().semanticHighlighting) {
@@ -633,204 +639,27 @@ class SemanticColoringFeature extends Disposable {

class SemanticStyling extends Disposable {

private _caches: WeakMap<DocumentSemanticTokensProvider, SemanticColoringProviderStyling>;
private _caches: WeakMap<DocumentTokensProvider, SemanticTokensProviderStyling>;

constructor(
private readonly _themeService: IThemeService,
private readonly _logService: ILogService
) {
super();
this._caches = new WeakMap<DocumentSemanticTokensProvider, SemanticColoringProviderStyling>();
this._caches = new WeakMap<DocumentTokensProvider, SemanticTokensProviderStyling>();
this._register(this._themeService.onDidColorThemeChange(() => {
this._caches = new WeakMap<DocumentSemanticTokensProvider, SemanticColoringProviderStyling>();
this._caches = new WeakMap<DocumentTokensProvider, SemanticTokensProviderStyling>();
}));
}

public get(provider: DocumentSemanticTokensProvider): SemanticColoringProviderStyling {
public get(provider: DocumentTokensProvider): SemanticTokensProviderStyling {
if (!this._caches.has(provider)) {
this._caches.set(provider, new SemanticColoringProviderStyling(provider.getLegend(), this._themeService, this._logService));
this._caches.set(provider, new SemanticTokensProviderStyling(provider.getLegend(), this._themeService, this._logService));
}
return this._caches.get(provider)!;
}
}
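
A side note on the SemanticStyling class above (editor's sketch, not in the commit): it caches one SemanticTokensProviderStyling per provider in a WeakMap and throws the whole map away when the color theme changes. A stripped-down version of that cache-with-invalidation pattern, with illustrative names:

class ProviderCache<K extends object, V> {
    private _map = new WeakMap<K, V>();
    constructor(private readonly _create: (key: K) => V) { }
    // Owner calls this on theme change: drop every cached value at once.
    clear(): void { this._map = new WeakMap<K, V>(); }
    get(key: K): V {
        if (!this._map.has(key)) {
            this._map.set(key, this._create(key));
        }
        return this._map.get(key)!;
    }
}

Using a WeakMap means a styling object can be garbage-collected together with its provider once the provider is unregistered.
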
const enum Constants {
NO_STYLING = 0b01111111111111111111111111111111
}

class HashTableEntry {
public readonly tokenTypeIndex: number;
public readonly tokenModifierSet: number;
public readonly languageId: number;
public readonly metadata: number;
public next: HashTableEntry | null;

constructor(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number) {
this.tokenTypeIndex = tokenTypeIndex;
this.tokenModifierSet = tokenModifierSet;
this.languageId = languageId;
this.metadata = metadata;
this.next = null;
}
}

class HashTable {

private static _SIZES = [3, 7, 13, 31, 61, 127, 251, 509, 1021, 2039, 4093, 8191, 16381, 32749, 65521, 131071, 262139, 524287, 1048573, 2097143];

private _elementsCount: number;
private _currentLengthIndex: number;
private _currentLength: number;
private _growCount: number;
private _elements: (HashTableEntry | null)[];

constructor() {
this._elementsCount = 0;
this._currentLengthIndex = 0;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);
}

private static _nullOutEntries(entries: (HashTableEntry | null)[], length: number): void {
for (let i = 0; i < length; i++) {
entries[i] = null;
}
}

private _hashFunc(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): number {
const hash = (n1: number, n2: number) => (((n1 << 5) - n1) + n2) | 0; // n1 * 31 + n2, keep as int32
return hash(hash(tokenTypeIndex, tokenModifierSet), languageId) % this._currentLength;
}

public get(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): HashTableEntry | null {
const hash = this._hashFunc(tokenTypeIndex, tokenModifierSet, languageId);

let p = this._elements[hash];
while (p) {
if (p.tokenTypeIndex === tokenTypeIndex && p.tokenModifierSet === tokenModifierSet && p.languageId === languageId) {
return p;
}
p = p.next;
}

return null;
}

public add(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number): void {
this._elementsCount++;
if (this._growCount !== 0 && this._elementsCount >= this._growCount) {
// expand!
const oldElements = this._elements;

this._currentLengthIndex++;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);

for (const first of oldElements) {
let p = first;
while (p) {
const oldNext = p.next;
p.next = null;
this._add(p);
p = oldNext;
}
}
}
this._add(new HashTableEntry(tokenTypeIndex, tokenModifierSet, languageId, metadata));
}

private _add(element: HashTableEntry): void {
const hash = this._hashFunc(element.tokenTypeIndex, element.tokenModifierSet, element.languageId);
element.next = this._elements[hash];
this._elements[hash] = element;
}
}

class SemanticColoringProviderStyling {

private readonly _hashTable: HashTable;

constructor(
private readonly _legend: SemanticTokensLegend,
private readonly _themeService: IThemeService,
private readonly _logService: ILogService
) {
this._hashTable = new HashTable();
}

public getMetadata(tokenTypeIndex: number, tokenModifierSet: number, languageId: LanguageIdentifier): number {
const entry = this._hashTable.get(tokenTypeIndex, tokenModifierSet, languageId.id);
let metadata: number;
if (entry) {
metadata = entry.metadata;
} else {
const tokenType = this._legend.tokenTypes[tokenTypeIndex];
const tokenModifiers: string[] = [];
let modifierSet = tokenModifierSet;
for (let modifierIndex = 0; modifierSet > 0 && modifierIndex < this._legend.tokenModifiers.length; modifierIndex++) {
if (modifierSet & 1) {
tokenModifiers.push(this._legend.tokenModifiers[modifierIndex]);
}
modifierSet = modifierSet >> 1;
}

const tokenStyle = this._themeService.getColorTheme().getTokenStyleMetadata(tokenType, tokenModifiers, languageId.language);
if (typeof tokenStyle === 'undefined') {
metadata = Constants.NO_STYLING;
} else {
metadata = 0;
if (typeof tokenStyle.italic !== 'undefined') {
const italicBit = (tokenStyle.italic ? FontStyle.Italic : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= italicBit | MetadataConsts.SEMANTIC_USE_ITALIC;
}
if (typeof tokenStyle.bold !== 'undefined') {
const boldBit = (tokenStyle.bold ? FontStyle.Bold : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= boldBit | MetadataConsts.SEMANTIC_USE_BOLD;
}
if (typeof tokenStyle.underline !== 'undefined') {
const underlineBit = (tokenStyle.underline ? FontStyle.Underline : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= underlineBit | MetadataConsts.SEMANTIC_USE_UNDERLINE;
}
if (tokenStyle.foreground) {
const foregroundBits = (tokenStyle.foreground) << MetadataConsts.FOREGROUND_OFFSET;
metadata |= foregroundBits | MetadataConsts.SEMANTIC_USE_FOREGROUND;
}
if (metadata === 0) {
// Nothing!
metadata = Constants.NO_STYLING;
}
}
this._hashTable.add(tokenTypeIndex, tokenModifierSet, languageId.id, metadata);
}
if (this._logService.getLevel() === LogLevel.Trace) {
const type = this._legend.tokenTypes[tokenTypeIndex];
const modifiers = tokenModifierSet ? ' ' + this._legend.tokenModifiers.filter((_, i) => tokenModifierSet & (1 << i)).join(' ') : '';
this._logService.trace(`tokenStyleMetadata ${entry ? '[CACHED] ' : ''}${type}${modifiers}: foreground ${TokenMetadata.getForeground(metadata)}, fontStyle ${TokenMetadata.getFontStyle(metadata).toString(2)}`);
}
return metadata;
}


}

const enum SemanticColoringConstants {
/**
* Let's aim at having 8KB buffers if possible...
* So that would be 8192 / (5 * 4) = 409.6 tokens per area
*/
DesiredTokensPerArea = 400,

/**
* Try to keep the total number of areas under 1024 if possible,
* simply compensate by having more tokens per area...
*/
DesiredMaxAreas = 1024,
}

class SemanticTokensResponse {
constructor(
private readonly _provider: DocumentSemanticTokensProvider,
@@ -848,10 +677,10 @@ class ModelSemanticColoring extends Disposable {
private _isDisposed: boolean;
private readonly _model: ITextModel;
private readonly _semanticStyling: SemanticStyling;
private readonly _fetchSemanticTokens: RunOnceScheduler;
private _currentResponse: SemanticTokensResponse | null;
private _currentRequestCancellationTokenSource: CancellationTokenSource | null;
private _providersChangeListeners: IDisposable[];
private readonly _fetchDocumentSemanticTokens: RunOnceScheduler;
private _currentDocumentResponse: SemanticTokensResponse | null;
private _currentDocumentRequestCancellationTokenSource: CancellationTokenSource | null;
private _documentProvidersChangeListeners: IDisposable[];

constructor(model: ITextModel, themeService: IThemeService, stylingProvider: SemanticStyling) {
super();
@@ -859,57 +688,57 @@ class ModelSemanticColoring extends Disposable {
this._isDisposed = false;
this._model = model;
this._semanticStyling = stylingProvider;
this._fetchSemanticTokens = this._register(new RunOnceScheduler(() => this._fetchSemanticTokensNow(), 300));
this._currentResponse = null;
this._currentRequestCancellationTokenSource = null;
this._providersChangeListeners = [];
this._fetchDocumentSemanticTokens = this._register(new RunOnceScheduler(() => this._fetchDocumentSemanticTokensNow(), 300));
this._currentDocumentResponse = null;
this._currentDocumentRequestCancellationTokenSource = null;
this._documentProvidersChangeListeners = [];

this._register(this._model.onDidChangeContent(e => {
if (!this._fetchSemanticTokens.isScheduled()) {
this._fetchSemanticTokens.schedule();
this._register(this._model.onDidChangeContent(() => {
if (!this._fetchDocumentSemanticTokens.isScheduled()) {
this._fetchDocumentSemanticTokens.schedule();
}
}));
const bindChangeListeners = () => {
dispose(this._providersChangeListeners);
this._providersChangeListeners = [];
const bindDocumentChangeListeners = () => {
dispose(this._documentProvidersChangeListeners);
this._documentProvidersChangeListeners = [];
for (const provider of DocumentSemanticTokensProviderRegistry.all(model)) {
if (typeof provider.onDidChange === 'function') {
this._providersChangeListeners.push(provider.onDidChange(() => this._fetchSemanticTokens.schedule(0)));
this._documentProvidersChangeListeners.push(provider.onDidChange(() => this._fetchDocumentSemanticTokens.schedule(0)));
}
}
};
bindChangeListeners();
this._register(DocumentSemanticTokensProviderRegistry.onDidChange(e => {
bindChangeListeners();
this._fetchSemanticTokens.schedule();
bindDocumentChangeListeners();
this._register(DocumentSemanticTokensProviderRegistry.onDidChange(() => {
bindDocumentChangeListeners();
this._fetchDocumentSemanticTokens.schedule();
}));

this._register(themeService.onDidColorThemeChange(_ => {
// clear out existing tokens
this._setSemanticTokens(null, null, null, []);
this._fetchSemanticTokens.schedule();
this._setDocumentSemanticTokens(null, null, null, []);
this._fetchDocumentSemanticTokens.schedule();
}));

this._fetchSemanticTokens.schedule(0);
this._fetchDocumentSemanticTokens.schedule(0);
}
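
The constructor above debounces token fetches through a RunOnceScheduler: a content change only schedules a fetch when none is pending, while provider registrations and theme changes force an immediate run via schedule(0). A rough standalone sketch of that debouncing behaviour (a hypothetical scheduler, not the vs/base/common/async implementation):

class SimpleRunOnceScheduler {
    private _timer: ReturnType<typeof setTimeout> | null = null;
    constructor(private readonly _run: () => void, private readonly _delay: number) { }
    isScheduled(): boolean { return this._timer !== null; }
    schedule(delay: number = this._delay): void {
        this.cancel(); // restart the countdown
        this._timer = setTimeout(() => { this._timer = null; this._run(); }, delay);
    }
    cancel(): void {
        if (this._timer !== null) { clearTimeout(this._timer); this._timer = null; }
    }
}
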
public dispose(): void {
if (this._currentResponse) {
this._currentResponse.dispose();
this._currentResponse = null;
if (this._currentDocumentResponse) {
this._currentDocumentResponse.dispose();
this._currentDocumentResponse = null;
}
if (this._currentRequestCancellationTokenSource) {
this._currentRequestCancellationTokenSource.cancel();
this._currentRequestCancellationTokenSource = null;
if (this._currentDocumentRequestCancellationTokenSource) {
this._currentDocumentRequestCancellationTokenSource.cancel();
this._currentDocumentRequestCancellationTokenSource = null;
}
this._setSemanticTokens(null, null, null, []);
this._setDocumentSemanticTokens(null, null, null, []);
this._isDisposed = true;

super.dispose();
}

private _fetchSemanticTokensNow(): void {
if (this._currentRequestCancellationTokenSource) {
private _fetchDocumentSemanticTokensNow(): void {
if (this._currentDocumentRequestCancellationTokenSource) {
// there is already a request running, let it finish...
return;
}
@@ -917,7 +746,7 @@ class ModelSemanticColoring extends Disposable {
if (!provider) {
return;
}
this._currentRequestCancellationTokenSource = new CancellationTokenSource();
this._currentDocumentRequestCancellationTokenSource = new CancellationTokenSource();

const pendingChanges: IModelContentChangedEvent[] = [];
const contentChangeListener = this._model.onDidChangeContent((e) => {
@@ -926,13 +755,13 @@ class ModelSemanticColoring extends Disposable {

const styling = this._semanticStyling.get(provider);

const lastResultId = this._currentResponse ? this._currentResponse.resultId || null : null;
const request = Promise.resolve(provider.provideDocumentSemanticTokens(this._model, lastResultId, this._currentRequestCancellationTokenSource.token));
const lastResultId = this._currentDocumentResponse ? this._currentDocumentResponse.resultId || null : null;
const request = Promise.resolve(provider.provideDocumentSemanticTokens(this._model, lastResultId, this._currentDocumentRequestCancellationTokenSource.token));

request.then((res) => {
this._currentRequestCancellationTokenSource = null;
this._currentDocumentRequestCancellationTokenSource = null;
contentChangeListener.dispose();
this._setSemanticTokens(provider, res || null, styling, pendingChanges);
this._setDocumentSemanticTokens(provider, res || null, styling, pendingChanges);
}, (err) => {
if (!err || typeof err.message !== 'string' || err.message.indexOf('busy') === -1) {
errors.onUnexpectedError(err);
@@ -940,13 +769,13 @@ class ModelSemanticColoring extends Disposable {

// Semantic tokens eats up all errors and considers errors to mean that the result is temporarily not available
// The API does not have a special error kind to express this...
this._currentRequestCancellationTokenSource = null;
this._currentDocumentRequestCancellationTokenSource = null;
contentChangeListener.dispose();

if (pendingChanges.length > 0) {
// More changes occurred while the request was running
if (!this._fetchSemanticTokens.isScheduled()) {
this._fetchSemanticTokens.schedule();
if (!this._fetchDocumentSemanticTokens.isScheduled()) {
this._fetchDocumentSemanticTokens.schedule();
}
}
});
@@ -966,11 +795,11 @@ class ModelSemanticColoring extends Disposable {
}
}

private _setSemanticTokens(provider: DocumentSemanticTokensProvider | null, tokens: SemanticTokens | SemanticTokensEdits | null, styling: SemanticColoringProviderStyling | null, pendingChanges: IModelContentChangedEvent[]): void {
const currentResponse = this._currentResponse;
if (this._currentResponse) {
this._currentResponse.dispose();
this._currentResponse = null;
private _setDocumentSemanticTokens(provider: DocumentSemanticTokensProvider | null, tokens: SemanticTokens | SemanticTokensEdits | null, styling: SemanticTokensProviderStyling | null, pendingChanges: IModelContentChangedEvent[]): void {
const currentResponse = this._currentDocumentResponse;
if (this._currentDocumentResponse) {
this._currentDocumentResponse.dispose();
this._currentDocumentResponse = null;
}
if (this._isDisposed) {
// disposed!
@@ -979,15 +808,19 @@ class ModelSemanticColoring extends Disposable {
}
return;
}
if (!provider || !tokens || !styling) {
this._model.setSemanticTokens(null);
if (!provider || !styling) {
this._model.setSemanticTokens(null, false);
return;
}
if (!tokens) {
this._model.setSemanticTokens(null, true);
return;
}

if (ModelSemanticColoring._isSemanticTokensEdits(tokens)) {
if (!currentResponse) {
// not possible!
this._model.setSemanticTokens(null);
this._model.setSemanticTokens(null, true);
return;
}
if (tokens.edits.length === 0) {
@@ -1037,80 +870,9 @@ class ModelSemanticColoring extends Disposable {

if (ModelSemanticColoring._isSemanticTokens(tokens)) {

this._currentResponse = new SemanticTokensResponse(provider, tokens.resultId, tokens.data);
this._currentDocumentResponse = new SemanticTokensResponse(provider, tokens.resultId, tokens.data);

const srcData = tokens.data;
const tokenCount = (tokens.data.length / 5) | 0;
const tokensPerArea = Math.max(Math.ceil(tokenCount / SemanticColoringConstants.DesiredMaxAreas), SemanticColoringConstants.DesiredTokensPerArea);

const result: MultilineTokens2[] = [];

const languageId = this._model.getLanguageIdentifier();

let tokenIndex = 0;
let lastLineNumber = 1;
let lastStartCharacter = 0;
while (tokenIndex < tokenCount) {
const tokenStartIndex = tokenIndex;
let tokenEndIndex = Math.min(tokenStartIndex + tokensPerArea, tokenCount);

// Keep tokens on the same line in the same area...
if (tokenEndIndex < tokenCount) {

let smallTokenEndIndex = tokenEndIndex;
while (smallTokenEndIndex - 1 > tokenStartIndex && srcData[5 * smallTokenEndIndex] === 0) {
smallTokenEndIndex--;
}

if (smallTokenEndIndex - 1 === tokenStartIndex) {
// there are so many tokens on this line that our area would be empty, we must now go right
let bigTokenEndIndex = tokenEndIndex;
while (bigTokenEndIndex + 1 < tokenCount && srcData[5 * bigTokenEndIndex] === 0) {
bigTokenEndIndex++;
}
tokenEndIndex = bigTokenEndIndex;
} else {
tokenEndIndex = smallTokenEndIndex;
}
}

let destData = new Uint32Array((tokenEndIndex - tokenStartIndex) * 4);
let destOffset = 0;
let areaLine = 0;
while (tokenIndex < tokenEndIndex) {
const srcOffset = 5 * tokenIndex;
const deltaLine = srcData[srcOffset];
const deltaCharacter = srcData[srcOffset + 1];
const lineNumber = lastLineNumber + deltaLine;
const startCharacter = (deltaLine === 0 ? lastStartCharacter + deltaCharacter : deltaCharacter);
const length = srcData[srcOffset + 2];
const tokenTypeIndex = srcData[srcOffset + 3];
const tokenModifierSet = srcData[srcOffset + 4];
const metadata = styling.getMetadata(tokenTypeIndex, tokenModifierSet, languageId);

if (metadata !== Constants.NO_STYLING) {
if (areaLine === 0) {
areaLine = lineNumber;
}
destData[destOffset] = lineNumber - areaLine;
destData[destOffset + 1] = startCharacter;
destData[destOffset + 2] = startCharacter + length;
destData[destOffset + 3] = metadata;
destOffset += 4;
}

lastLineNumber = lineNumber;
lastStartCharacter = startCharacter;
tokenIndex++;
}

if (destOffset !== destData.length) {
destData = destData.subarray(0, destOffset);
}

const tokens = new MultilineTokens2(areaLine, new SparseEncodedTokens(destData));
result.push(tokens);
}
const result = toMultilineTokens2(tokens, styling, this._model.getLanguageIdentifier());

// Adjust incoming semantic tokens
if (pendingChanges.length > 0) {
@@ -1126,16 +888,16 @@ class ModelSemanticColoring extends Disposable {
}
}

if (!this._fetchSemanticTokens.isScheduled()) {
this._fetchSemanticTokens.schedule();
if (!this._fetchDocumentSemanticTokens.isScheduled()) {
this._fetchDocumentSemanticTokens.schedule();
}
}

this._model.setSemanticTokens(result);
this._model.setSemanticTokens(result, true);
return;
}

this._model.setSemanticTokens(null);
this._model.setSemanticTokens(null, true);
}

private _getSemanticColoringProvider(): DocumentSemanticTokensProvider | null {

src/vs/editor/common/services/semanticTokensProviderStyling.ts (new file, 261 lines)
@@ -0,0 +1,261 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { SemanticTokensLegend, TokenMetadata, FontStyle, MetadataConsts, SemanticTokens, LanguageIdentifier } from 'vs/editor/common/modes';
import { IThemeService } from 'vs/platform/theme/common/themeService';
import { ILogService, LogLevel } from 'vs/platform/log/common/log';
import { MultilineTokens2, SparseEncodedTokens } from 'vs/editor/common/model/tokensStore';

export const enum SemanticTokensProviderStylingConstants {
NO_STYLING = 0b01111111111111111111111111111111
}

export class SemanticTokensProviderStyling {

private readonly _hashTable: HashTable;

constructor(
private readonly _legend: SemanticTokensLegend,
private readonly _themeService: IThemeService,
private readonly _logService: ILogService
) {
this._hashTable = new HashTable();
}

public getMetadata(tokenTypeIndex: number, tokenModifierSet: number, languageId: LanguageIdentifier): number {
const entry = this._hashTable.get(tokenTypeIndex, tokenModifierSet, languageId.id);
let metadata: number;
if (entry) {
metadata = entry.metadata;
} else {
const tokenType = this._legend.tokenTypes[tokenTypeIndex];
const tokenModifiers: string[] = [];
let modifierSet = tokenModifierSet;
for (let modifierIndex = 0; modifierSet > 0 && modifierIndex < this._legend.tokenModifiers.length; modifierIndex++) {
if (modifierSet & 1) {
tokenModifiers.push(this._legend.tokenModifiers[modifierIndex]);
}
modifierSet = modifierSet >> 1;
}

const tokenStyle = this._themeService.getColorTheme().getTokenStyleMetadata(tokenType, tokenModifiers, languageId.language);
if (typeof tokenStyle === 'undefined') {
metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
} else {
metadata = 0;
if (typeof tokenStyle.italic !== 'undefined') {
const italicBit = (tokenStyle.italic ? FontStyle.Italic : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= italicBit | MetadataConsts.SEMANTIC_USE_ITALIC;
}
if (typeof tokenStyle.bold !== 'undefined') {
const boldBit = (tokenStyle.bold ? FontStyle.Bold : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= boldBit | MetadataConsts.SEMANTIC_USE_BOLD;
}
if (typeof tokenStyle.underline !== 'undefined') {
const underlineBit = (tokenStyle.underline ? FontStyle.Underline : 0) << MetadataConsts.FONT_STYLE_OFFSET;
metadata |= underlineBit | MetadataConsts.SEMANTIC_USE_UNDERLINE;
}
if (tokenStyle.foreground) {
const foregroundBits = (tokenStyle.foreground) << MetadataConsts.FOREGROUND_OFFSET;
metadata |= foregroundBits | MetadataConsts.SEMANTIC_USE_FOREGROUND;
}
if (metadata === 0) {
// Nothing!
metadata = SemanticTokensProviderStylingConstants.NO_STYLING;
}
}
this._hashTable.add(tokenTypeIndex, tokenModifierSet, languageId.id, metadata);
}
if (this._logService.getLevel() === LogLevel.Trace) {
const type = this._legend.tokenTypes[tokenTypeIndex];
const modifiers = tokenModifierSet ? ' ' + this._legend.tokenModifiers.filter((_, i) => tokenModifierSet & (1 << i)).join(' ') : '';
this._logService.trace(`tokenStyleMetadata ${entry ? '[CACHED] ' : ''}${type}${modifiers}: foreground ${TokenMetadata.getForeground(metadata)}, fontStyle ${TokenMetadata.getFontStyle(metadata).toString(2)}`);
}
return metadata;
}
}

const enum SemanticColoringConstants {
/**
* Let's aim at having 8KB buffers if possible...
* So that would be 8192 / (5 * 4) = 409.6 tokens per area
*/
DesiredTokensPerArea = 400,

/**
* Try to keep the total number of areas under 1024 if possible,
* simply compensate by having more tokens per area...
*/
DesiredMaxAreas = 1024,
}
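
To make the two constants above concrete (editor's worked example, not in the commit): each token occupies five uint32 values in the provider's data array, and tokensPerArea in toMultilineTokens2 below is Math.max(Math.ceil(tokenCount / DesiredMaxAreas), DesiredTokensPerArea).

// tokenCount =    10_000  ->  max(ceil(10000 / 1024), 400) = max(10, 400)  = 400 tokens per area (~25 areas)
// tokenCount = 1_000_000  ->  max(ceil(1e6 / 1024), 400)   = max(977, 400) = 977 tokens per area (~1024 areas)
// A 400-token area stays near the 8KB target from the comment above: 400 * 5 * 4 bytes = 8000 bytes.
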
export function toMultilineTokens2(tokens: SemanticTokens, styling: SemanticTokensProviderStyling, languageId: LanguageIdentifier): MultilineTokens2[] {
const srcData = tokens.data;
const tokenCount = (tokens.data.length / 5) | 0;
const tokensPerArea = Math.max(Math.ceil(tokenCount / SemanticColoringConstants.DesiredMaxAreas), SemanticColoringConstants.DesiredTokensPerArea);
const result: MultilineTokens2[] = [];

let tokenIndex = 0;
let lastLineNumber = 1;
let lastStartCharacter = 0;
while (tokenIndex < tokenCount) {
const tokenStartIndex = tokenIndex;
let tokenEndIndex = Math.min(tokenStartIndex + tokensPerArea, tokenCount);

// Keep tokens on the same line in the same area...
if (tokenEndIndex < tokenCount) {

let smallTokenEndIndex = tokenEndIndex;
while (smallTokenEndIndex - 1 > tokenStartIndex && srcData[5 * smallTokenEndIndex] === 0) {
smallTokenEndIndex--;
}

if (smallTokenEndIndex - 1 === tokenStartIndex) {
// there are so many tokens on this line that our area would be empty, we must now go right
let bigTokenEndIndex = tokenEndIndex;
while (bigTokenEndIndex + 1 < tokenCount && srcData[5 * bigTokenEndIndex] === 0) {
bigTokenEndIndex++;
}
tokenEndIndex = bigTokenEndIndex;
} else {
tokenEndIndex = smallTokenEndIndex;
}
}

let destData = new Uint32Array((tokenEndIndex - tokenStartIndex) * 4);
let destOffset = 0;
let areaLine = 0;
while (tokenIndex < tokenEndIndex) {
const srcOffset = 5 * tokenIndex;
const deltaLine = srcData[srcOffset];
const deltaCharacter = srcData[srcOffset + 1];
const lineNumber = lastLineNumber + deltaLine;
const startCharacter = (deltaLine === 0 ? lastStartCharacter + deltaCharacter : deltaCharacter);
const length = srcData[srcOffset + 2];
const tokenTypeIndex = srcData[srcOffset + 3];
const tokenModifierSet = srcData[srcOffset + 4];
const metadata = styling.getMetadata(tokenTypeIndex, tokenModifierSet, languageId);

if (metadata !== SemanticTokensProviderStylingConstants.NO_STYLING) {
if (areaLine === 0) {
areaLine = lineNumber;
}
destData[destOffset] = lineNumber - areaLine;
destData[destOffset + 1] = startCharacter;
destData[destOffset + 2] = startCharacter + length;
destData[destOffset + 3] = metadata;
destOffset += 4;
}

lastLineNumber = lineNumber;
lastStartCharacter = startCharacter;
tokenIndex++;
}

if (destOffset !== destData.length) {
destData = destData.subarray(0, destOffset);
}

const tokens = new MultilineTokens2(areaLine, new SparseEncodedTokens(destData));
result.push(tokens);
}

return result;
}
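
toMultilineTokens2 above consumes the standard delta-encoded layout of tokens.data: five uints per token (deltaLine, deltaStartCharacter, length, tokenTypeIndex, tokenModifierSet), each position relative to the previous token. A small self-contained decoder for that layout (editor's sketch with made-up sample numbers, independent of the function above):

// Three sample tokens spanning two lines:
//   [0, 4, 3, 1, 0,   0, 10, 5, 2, 1,   2, 0, 7, 1, 0]
// token 1: line 1, chars 4..7, type 1, no modifiers
// token 2: line 1 (deltaLine 0), chars 4+10=14..19, type 2, modifier bit 0 set
// token 3: line 1+2=3, chars 0..7 (a non-zero deltaLine resets the character base), type 1
function decodeTokens(data: Uint32Array | number[]): { line: number; start: number; end: number; type: number; modifiers: number }[] {
    const out: { line: number; start: number; end: number; type: number; modifiers: number }[] = [];
    let line = 1;
    let char = 0;
    for (let i = 0; i < data.length; i += 5) {
        line += data[i];
        char = data[i] === 0 ? char + data[i + 1] : data[i + 1];
        out.push({ line, start: char, end: char + data[i + 2], type: data[i + 3], modifiers: data[i + 4] });
    }
    return out;
}
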
class HashTableEntry {
public readonly tokenTypeIndex: number;
public readonly tokenModifierSet: number;
public readonly languageId: number;
public readonly metadata: number;
public next: HashTableEntry | null;

constructor(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number) {
this.tokenTypeIndex = tokenTypeIndex;
this.tokenModifierSet = tokenModifierSet;
this.languageId = languageId;
this.metadata = metadata;
this.next = null;
}
}

class HashTable {

private static _SIZES = [3, 7, 13, 31, 61, 127, 251, 509, 1021, 2039, 4093, 8191, 16381, 32749, 65521, 131071, 262139, 524287, 1048573, 2097143];

private _elementsCount: number;
private _currentLengthIndex: number;
private _currentLength: number;
private _growCount: number;
private _elements: (HashTableEntry | null)[];

constructor() {
this._elementsCount = 0;
this._currentLengthIndex = 0;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);
}

private static _nullOutEntries(entries: (HashTableEntry | null)[], length: number): void {
for (let i = 0; i < length; i++) {
entries[i] = null;
}
}

private _hash2(n1: number, n2: number): number {
return (((n1 << 5) - n1) + n2) | 0; // n1 * 31 + n2, keep as int32
}

private _hashFunc(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): number {
return this._hash2(this._hash2(tokenTypeIndex, tokenModifierSet), languageId) % this._currentLength;
}

public get(tokenTypeIndex: number, tokenModifierSet: number, languageId: number): HashTableEntry | null {
const hash = this._hashFunc(tokenTypeIndex, tokenModifierSet, languageId);

let p = this._elements[hash];
while (p) {
if (p.tokenTypeIndex === tokenTypeIndex && p.tokenModifierSet === tokenModifierSet && p.languageId === languageId) {
return p;
}
p = p.next;
}

return null;
}

public add(tokenTypeIndex: number, tokenModifierSet: number, languageId: number, metadata: number): void {
this._elementsCount++;
if (this._growCount !== 0 && this._elementsCount >= this._growCount) {
// expand!
const oldElements = this._elements;

this._currentLengthIndex++;
this._currentLength = HashTable._SIZES[this._currentLengthIndex];
this._growCount = Math.round(this._currentLengthIndex + 1 < HashTable._SIZES.length ? 2 / 3 * this._currentLength : 0);
this._elements = [];
HashTable._nullOutEntries(this._elements, this._currentLength);

for (const first of oldElements) {
let p = first;
while (p) {
const oldNext = p.next;
p.next = null;
this._add(p);
p = oldNext;
}
}
}
this._add(new HashTableEntry(tokenTypeIndex, tokenModifierSet, languageId, metadata));
}

private _add(element: HashTableEntry): void {
const hash = this._hashFunc(element.tokenTypeIndex, element.tokenModifierSet, element.languageId);
element.next = this._elements[hash];
this._elements[hash] = element;
}
}
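
The HashTable above is a purpose-built chained hash map keyed by (tokenTypeIndex, tokenModifierSet, languageId): collisions chain through next, the bucket count walks a fixed list of primes, and a rehash triggers at roughly 2/3 occupancy. A hypothetical get-or-compute helper in the same spirit as getMetadata (computeMetadata stands in for the theme lookup and is not part of this file):

function getOrAddMetadata(table: HashTable, tokenTypeIndex: number, tokenModifierSet: number, languageId: number, computeMetadata: () => number): number {
    const entry = table.get(tokenTypeIndex, tokenModifierSet, languageId);
    if (entry) {
        return entry.metadata; // cache hit: reuse the packed metadata
    }
    const metadata = computeMetadata();
    table.add(tokenTypeIndex, tokenModifierSet, languageId, metadata);
    return metadata;
}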