Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-16 10:58:30 -05:00
Merge from vscode a416c77e56ef0314ae00633faa04878151610de8 (#8600)

* Merge from vscode a416c77e56ef0314ae00633faa04878151610de8
* distro
* fix tests
* fix tests
src/vs/editor/test/common/model/tokensStore.test.ts (new file, 168 lines added)
@@ -0,0 +1,168 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import { MultilineTokens2, SparseEncodedTokens } from 'vs/editor/common/model/tokensStore';
import { Range } from 'vs/editor/common/core/range';
import { TextModel } from 'vs/editor/common/model/textModel';
import { IIdentifiedSingleEditOperation } from 'vs/editor/common/model';
import { MetadataConsts, TokenMetadata } from 'vs/editor/common/modes';
suite('TokensStore', () => {

	const SEMANTIC_COLOR = 5;

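	// Parses lines in which semantic tokens are marked with |pipes|,
	// e.g. `const foo = |URI|.parse('hey');`: the pipes are stripped from the returned text,
	// and each marked span is recorded as (line delta from baseLine, token start offset, token end offset, metadata),
	// with SEMANTIC_COLOR encoded as the foreground color. A literal `||` is kept as-is.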
	function parseTokensState(state: string[]): { text: string; tokens: MultilineTokens2; } {
		let text: string[] = [];
		let tokens: number[] = [];
		let baseLine = 1;
		for (let i = 0; i < state.length; i++) {
			const line = state[i];

			let startOffset = 0;
			let lineText = '';
			while (true) {
				const firstPipeOffset = line.indexOf('|', startOffset);
				if (firstPipeOffset === -1) {
					break;
				}
				const secondPipeOffset = line.indexOf('|', firstPipeOffset + 1);
				if (secondPipeOffset === -1) {
					break;
				}
				if (firstPipeOffset + 1 === secondPipeOffset) {
					// skip ||
					lineText += line.substring(startOffset, secondPipeOffset + 1);
					startOffset = secondPipeOffset + 1;
					continue;
				}

				lineText += line.substring(startOffset, firstPipeOffset);
				const tokenStartCharacter = lineText.length;
				const tokenLength = secondPipeOffset - firstPipeOffset - 1;
				const metadata = (SEMANTIC_COLOR << MetadataConsts.FOREGROUND_OFFSET);

				if (tokens.length === 0) {
					baseLine = i + 1;
				}
				tokens.push(i + 1 - baseLine, tokenStartCharacter, tokenStartCharacter + tokenLength, metadata);

				lineText += line.substr(firstPipeOffset + 1, tokenLength);
				startOffset = secondPipeOffset + 1;
			}

			lineText += line.substring(startOffset);

			text.push(lineText);
		}

		return {
			text: text.join('\n'),
			tokens: new MultilineTokens2(baseLine, new SparseEncodedTokens(new Uint32Array(tokens)))
		};
	}

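	// Reads the model's tokens back out into the pipe notation: any token whose foreground
	// color is SEMANTIC_COLOR is re-wrapped in |pipes|, everything else is emitted as-is,
	// so the result can be compared against expected pipe-annotated lines.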
	function extractState(model: TextModel): string[] {
		let result: string[] = [];
		for (let lineNumber = 1; lineNumber <= model.getLineCount(); lineNumber++) {
			const lineTokens = model.getLineTokens(lineNumber);
			const lineContent = model.getLineContent(lineNumber);

			let lineText = '';
			for (let i = 0; i < lineTokens.getCount(); i++) {
				const tokenStartCharacter = lineTokens.getStartOffset(i);
				const tokenEndCharacter = lineTokens.getEndOffset(i);
				const metadata = lineTokens.getMetadata(i);
				const color = TokenMetadata.getForeground(metadata);
				const tokenText = lineContent.substring(tokenStartCharacter, tokenEndCharacter);
				if (color === SEMANTIC_COLOR) {
					lineText += `|${tokenText}|`;
				} else {
					lineText += tokenText;
				}
			}

			result.push(lineText);
		}
		return result;
	}

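	// Test harness: builds a TextModel from the pipe-annotated initial state, attaches the
	// parsed semantic tokens, applies the given edits, and asserts that the extracted state
	// matches the expected pipe-annotated final state (i.e. the tokens moved with the text).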
	function testTokensAdjustment(rawInitialState: string[], edits: IIdentifiedSingleEditOperation[], rawFinalState: string[]) {
		const initialState = parseTokensState(rawInitialState);
		const model = TextModel.createFromString(initialState.text);
		model.setSemanticTokens([initialState.tokens]);

		model.applyEdits(edits);

		const actualState = extractState(model);
		assert.deepEqual(actualState, rawFinalState);

		model.dispose();
	}

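	// Note: Range takes 1-based (startLineNumber, startColumn, endLineNumber, endColumn),
	// so new Range(2, 9, 2, 10) below covers exactly one character on line 2.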
	test('issue #86303 - color shifting between different tokens', () => {
		testTokensAdjustment(
			[
				`import { |URI| } from 'vs/base/common/uri';`,
				`const foo = |URI|.parse('hey');`
			],
			[
				{ range: new Range(2, 9, 2, 10), text: '' }
			],
			[
				`import { |URI| } from 'vs/base/common/uri';`,
				`const fo = |URI|.parse('hey');`
			]
		);
	});

	test('deleting a newline', () => {
		testTokensAdjustment(
			[
				`import { |URI| } from 'vs/base/common/uri';`,
				`const foo = |URI|.parse('hey');`
			],
			[
				{ range: new Range(1, 42, 2, 1), text: '' }
			],
			[
				`import { |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
			]
		);
	});

	test('inserting a newline', () => {
		testTokensAdjustment(
			[
				`import { |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
			],
			[
				{ range: new Range(1, 42, 1, 42), text: '\n' }
			],
			[
				`import { |URI| } from 'vs/base/common/uri';`,
				`const foo = |URI|.parse('hey');`
			]
		);
	});

	test('deleting a newline 2', () => {
		testTokensAdjustment(
			[
				`import { `,
				` |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
			],
			[
				{ range: new Range(1, 10, 2, 5), text: '' }
			],
			[
				`import { |URI| } from 'vs/base/common/uri';const foo = |URI|.parse('hey');`
			]
		);
	});

});
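For illustration only, not part of this commit: a further scenario could be written in the same style, reusing the helpers above. The test name, the edit, and the expected shift below are assumptions for the sketch, not taken from the source.

	test('typing before a token (illustrative sketch)', () => {
		testTokensAdjustment(
			[
				`const foo = |URI|.parse('hey');`
			],
			[
				// insert one character before the token; the |URI| token is expected to shift right intact
				{ range: new Range(1, 7, 1, 7), text: 'x' }
			],
			[
				`const xfoo = |URI|.parse('hey');`
			]
		);
	});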