mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-09 17:52:34 -05:00
Merge from vscode a348d103d1256a06a2c9b3f9b406298a9fef6898 (#15681)
* Merge from vscode a348d103d1256a06a2c9b3f9b406298a9fef6898
* Fixes and cleanup
* Distro
* Fix hygiene yarn
* delete no yarn lock changes file
* Fix hygiene
* Fix layer check
* Fix CI
* Skip lib checks
* Remove tests deleted in vs code
* Fix tests
* Distro
* Fix tests and add removed extension point
* Skip failing notebook tests for now
* Disable broken tests and cleanup build folder
* Update yarn.lock and fix smoke tests
* Bump sqlite
* fix contributed actions and file spacing
* Fix user data path
* Update yarn.locks

Co-authored-by: ADS Merger <karlb@microsoft.com>
src/vs/editor/test/common/model/editStack.test.ts (new file, +31 lines)
@@ -0,0 +1,31 @@
+/*---------------------------------------------------------------------------------------------
+ * Copyright (c) Microsoft Corporation. All rights reserved.
+ * Licensed under the MIT License. See License.txt in the project root for license information.
+ *--------------------------------------------------------------------------------------------*/
+
+import * as assert from 'assert';
+import { EndOfLineSequence } from 'vs/editor/common/model';
+import { SingleModelEditStackData } from 'vs/editor/common/model/editStack';
+import { Selection } from 'vs/editor/common/core/selection';
+import { TextChange } from 'vs/editor/common/model/textChange';
+
+suite('EditStack', () => {
+
+    test('issue #118041: unicode character undo bug', () => {
+        const stackData = new SingleModelEditStackData(
+            1,
+            2,
+            EndOfLineSequence.LF,
+            EndOfLineSequence.LF,
+            [new Selection(10, 2, 10, 2)],
+            [new Selection(10, 1, 10, 1)],
+            [new TextChange(428, '', 428, '')]
+        );
+
+        const buff = stackData.serialize();
+        const actual = SingleModelEditStackData.deserialize(buff);
+
+        assert.deepStrictEqual(actual, stackData);
+    });
+
+});
@@ -20,7 +20,7 @@ export function testApplyEditsWithSyncedModels(original: string[], edits: IIdent
     let inverseEdits = model.applyEdits(edits, true);

     // Assert edits produced expected result
-    assert.deepEqual(model.getValue(EndOfLinePreference.LF), expectedStr);
+    assert.deepStrictEqual(model.getValue(EndOfLinePreference.LF), expectedStr);

     assertMirrorModels();

@@ -28,7 +28,7 @@ export function testApplyEditsWithSyncedModels(original: string[], edits: IIdent
     let inverseInverseEdits = model.applyEdits(inverseEdits, true);

     // Assert the inverse edits brought back model to original state
-    assert.deepEqual(model.getValue(EndOfLinePreference.LF), originalStr);
+    assert.deepStrictEqual(model.getValue(EndOfLinePreference.LF), originalStr);

     if (!inputEditsAreInvalid) {
         const simplifyEdit = (edit: IIdentifiedSingleEditOperation) => {
@@ -41,7 +41,7 @@ export function testApplyEditsWithSyncedModels(original: string[], edits: IIdent
             };
         };
         // Assert the inverse of the inverse edits are the original edits
-        assert.deepEqual(inverseInverseEdits.map(simplifyEdit), edits.map(simplifyEdit));
+        assert.deepStrictEqual(inverseInverseEdits.map(simplifyEdit), edits.map(simplifyEdit));
     }

     assertMirrorModels();
@@ -59,16 +59,16 @@ function assertOneDirectionLineMapping(model: TextModel, direction: AssertDocume
     let line = 1, column = 1, previousIsCarriageReturn = false;
     for (let offset = 0; offset <= allText.length; offset++) {
         // The position coordinate system cannot express the position between \r and \n
-        let position = new Position(line, column + (previousIsCarriageReturn ? -1 : 0));
+        let position: Position = new Position(line, column + (previousIsCarriageReturn ? -1 : 0));

         if (direction === AssertDocumentLineMappingDirection.OffsetToPosition) {
             let actualPosition = model.getPositionAt(offset);
-            assert.equal(actualPosition.toString(), position.toString(), msg + ' - getPositionAt mismatch for offset ' + offset);
+            assert.strictEqual(actualPosition.toString(), position.toString(), msg + ' - getPositionAt mismatch for offset ' + offset);
         } else {
             // The position coordinate system cannot express the position between \r and \n
-            let expectedOffset = offset + (previousIsCarriageReturn ? -1 : 0);
+            let expectedOffset: number = offset + (previousIsCarriageReturn ? -1 : 0);
             let actualOffset = model.getOffsetAt(position);
-            assert.equal(actualOffset, expectedOffset, msg + ' - getOffsetAt mismatch for position ' + position.toString());
+            assert.strictEqual(actualOffset, expectedOffset, msg + ' - getOffsetAt mismatch for position ' + position.toString());
         }

         if (allText.charAt(offset) === '\n') {
@@ -112,8 +112,8 @@ export function assertSyncedModels(text: string, callback: (model: TextModel, as

     let assertMirrorModels = () => {
         assertLineMapping(model, 'model');
-        assert.equal(mirrorModel2.getText(), model.getValue(), 'mirror model 2 text OK');
-        assert.equal(mirrorModel2.version, model.getVersionId(), 'mirror model 2 version OK');
+        assert.strictEqual(mirrorModel2.getText(), model.getValue(), 'mirror model 2 text OK');
+        assert.strictEqual(mirrorModel2.version, model.getVersionId(), 'mirror model 2 version OK');
     };

     callback(model, assertMirrorModels);
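
The recurring change in the hunks above is the move from assert.equal / assert.deepEqual to their strict counterparts: the non-strict variants apply type coercion when comparing values, while the strict ones do not. A minimal standalone illustration in TypeScript (separate from the commit's own test code):

import * as assert from 'assert';

// Non-strict comparisons coerce primitive types, so both of these pass:
assert.equal(1, '1');
assert.deepEqual([1, 2], ['1', '2']);

// Strict comparisons require matching types as well as values:
assert.strictEqual(1, 1);
assert.deepStrictEqual([1, 2], [1, 2]);

// This would throw an AssertionError, which is the extra safety the
// migrated tests gain:
// assert.deepStrictEqual([1, 2], ['1', '2']);
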
@@ -119,6 +119,19 @@ suite('TextChangeCompressor', () => {
         );
     });

+    // test('issue #118041', () => {
+    //     assertCompression(
+    //         '',
+    //         [
+    //             { offset: 0, length: 1, text: '' },
+    //         ],
+    //         [
+    //             { offset: 1, length: 0, text: 'Z' },
+    //             { offset: 3, length: 3, text: 'Y' },
+    //         ]
+    //     );
+    // })
+
     test('gen1', () => {
         assertCompression(
             'kxm',
@@ -267,3 +280,16 @@ suite('TextChangeCompressor', () => {
         }
     }
 });
+
+suite('TextChange', () => {
+
+    test('issue #118041: unicode character undo bug', () => {
+        const textChange = new TextChange(428, '', 428, '');
+        const buff = new Uint8Array(textChange.writeSize());
+        textChange.write(buff, 0);
+        const actual: TextChange[] = [];
+        TextChange.read(buff, 0, actual);
+        assert.deepStrictEqual(actual[0], textChange);
+    });
+
+});
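
The new EditStack and TextChange tests above both follow the same write-into-a-buffer / read-back-and-compare pattern. A condensed standalone sketch of that pattern follows; the SimpleChange class, its field layout, and the string helpers are illustrative assumptions for this sketch only, not vscode's actual TextChange implementation:

import * as assert from 'assert';

// Illustrative stand-in for a change record; not vscode's TextChange.
class SimpleChange {
    constructor(
        public readonly oldPosition: number,
        public readonly oldText: string,
        public readonly newPosition: number,
        public readonly newText: string
    ) { }

    // 4 bytes per position, 4 bytes per string length, 2 bytes per UTF-16 code unit.
    writeSize(): number {
        return 4 + 4 + 4 + 4 + 2 * (this.oldText.length + this.newText.length);
    }

    write(buff: Uint8Array, offset: number): number {
        const view = new DataView(buff.buffer, buff.byteOffset);
        view.setUint32(offset, this.oldPosition); offset += 4;
        offset = writeStr(view, offset, this.oldText);
        view.setUint32(offset, this.newPosition); offset += 4;
        return writeStr(view, offset, this.newText);
    }

    static read(buff: Uint8Array, offset: number, result: SimpleChange[]): number {
        const view = new DataView(buff.buffer, buff.byteOffset);
        const oldPosition = view.getUint32(offset); offset += 4;
        const oldStr = readStr(view, offset); offset = oldStr.end;
        const newPosition = view.getUint32(offset); offset += 4;
        const newStr = readStr(view, offset); offset = newStr.end;
        result.push(new SimpleChange(oldPosition, oldStr.text, newPosition, newStr.text));
        return offset;
    }
}

function writeStr(view: DataView, offset: number, str: string): number {
    view.setUint32(offset, str.length); offset += 4;
    for (let i = 0; i < str.length; i++) {
        // Raw UTF-16 code units, so surrogate pairs survive the round trip.
        view.setUint16(offset, str.charCodeAt(i)); offset += 2;
    }
    return offset;
}

function readStr(view: DataView, offset: number): { text: string; end: number } {
    const len = view.getUint32(offset); offset += 4;
    const codes: number[] = [];
    for (let i = 0; i < len; i++) {
        codes.push(view.getUint16(offset)); offset += 2;
    }
    return { text: String.fromCharCode(...codes), end: offset };
}

// Round trip, including a character outside the Basic Multilingual Plane.
const change = new SimpleChange(428, '', 428, '\uD83D\uDE00'); // a surrogate pair
const buff = new Uint8Array(change.writeSize());
change.write(buff, 0);
const actual: SimpleChange[] = [];
SimpleChange.read(buff, 0, actual);
assert.deepStrictEqual(actual[0], change);
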
@@ -1062,4 +1062,11 @@ suite('TextModel.createSnapshot', () => {
         model.dispose();
     });

+    test('issue #119632: invalid range', () => {
+        const model = createTextModel('hello world!');
+        const actual = model._validateRangeRelaxedNoAllocations(new Range(<any>undefined, 0, <any>undefined, 1));
+        assert.deepStrictEqual(actual, new Range(1, 1, 1, 1));
+        model.dispose();
+    });
+
 });
@@ -4,7 +4,7 @@
  *--------------------------------------------------------------------------------------------*/

 import * as assert from 'assert';
-import { IDisposable } from 'vs/base/common/lifecycle';
+import { DisposableStore, IDisposable } from 'vs/base/common/lifecycle';
 import { Position } from 'vs/editor/common/core/position';
 import { Range } from 'vs/editor/common/core/range';
 import { TokenizationResult2 } from 'vs/editor/common/core/token';
@@ -337,6 +337,97 @@ suite('TextModelWithTokens', () => {
         registration.dispose();
     });

+    test('issue #95843: Highlighting of closing braces is indicating wrong brace when cursor is behind opening brace', () => {
+        const mode1 = new LanguageIdentifier('testMode1', 3);
+        const mode2 = new LanguageIdentifier('testMode2', 4);
+        const otherMetadata1 = (
+            (mode1.id << MetadataConsts.LANGUAGEID_OFFSET)
+            | (StandardTokenType.Other << MetadataConsts.TOKEN_TYPE_OFFSET)
+        ) >>> 0;
+        const otherMetadata2 = (
+            (mode2.id << MetadataConsts.LANGUAGEID_OFFSET)
+            | (StandardTokenType.Other << MetadataConsts.TOKEN_TYPE_OFFSET)
+        ) >>> 0;
+
+        const tokenizationSupport: ITokenizationSupport = {
+            getInitialState: () => NULL_STATE,
+            tokenize: undefined!,
+            tokenize2: (line, hasEOL, state) => {
+                switch (line) {
+                    case 'function f() {': {
+                        const tokens = new Uint32Array([
+                            0, otherMetadata1,
+                            8, otherMetadata1,
+                            9, otherMetadata1,
+                            10, otherMetadata1,
+                            11, otherMetadata1,
+                            12, otherMetadata1,
+                            13, otherMetadata1,
+                        ]);
+                        return new TokenizationResult2(tokens, state);
+                    }
+                    case '  return <p>{true}</p>;': {
+                        const tokens = new Uint32Array([
+                            0, otherMetadata1,
+                            2, otherMetadata1,
+                            8, otherMetadata1,
+                            9, otherMetadata2,
+                            10, otherMetadata2,
+                            11, otherMetadata2,
+                            12, otherMetadata2,
+                            13, otherMetadata1,
+                            17, otherMetadata2,
+                            18, otherMetadata2,
+                            20, otherMetadata2,
+                            21, otherMetadata2,
+                            22, otherMetadata2,
+                        ]);
+                        return new TokenizationResult2(tokens, state);
+                    }
+                    case '}': {
+                        const tokens = new Uint32Array([
+                            0, otherMetadata1
+                        ]);
+                        return new TokenizationResult2(tokens, state);
+                    }
+                }
+                throw new Error(`Unexpected`);
+            }
+        };
+
+        const disposableStore = new DisposableStore();
+
+        disposableStore.add(TokenizationRegistry.register(mode1.language, tokenizationSupport));
+        disposableStore.add(LanguageConfigurationRegistry.register(mode1, {
+            brackets: [
+                ['{', '}'],
+                ['[', ']'],
+                ['(', ')']
+            ],
+        }));
+        disposableStore.add(LanguageConfigurationRegistry.register(mode2, {
+            brackets: [
+                ['{', '}'],
+                ['[', ']'],
+                ['(', ')']
+            ],
+        }));
+
+        const model = disposableStore.add(createTextModel([
+            'function f() {',
+            '  return <p>{true}</p>;',
+            '}',
+        ].join('\n'), undefined, mode1));
+
+        model.forceTokenization(1);
+        model.forceTokenization(2);
+        model.forceTokenization(3);
+
+        assert.deepStrictEqual(model.matchBracket(new Position(2, 14)), [new Range(2, 13, 2, 14), new Range(2, 18, 2, 19)]);
+
+        disposableStore.dispose();
+    });
+
     test('issue #88075: TypeScript brace matching is incorrect in `${}` strings', () => {
         const mode = new LanguageIdentifier('testMode', 3);
         const otherMetadata = (
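
The otherMetadata1 / otherMetadata2 values in the new brace-matching test are built by bit-packing a language id and a standard token type into one unsigned 32-bit integer, which is then stored in the Uint32Array of (offset, metadata) pairs. A standalone sketch of that packing; the offset and mask constants below are illustrative assumptions for this sketch, not necessarily vscode's exact MetadataConsts values:

// Assumed bit layout for this sketch only: language id in the low 8 bits,
// standard token type in the next 2 bits.
const LANGUAGEID_OFFSET = 0;
const LANGUAGEID_MASK = 0xFF;
const TOKEN_TYPE_OFFSET = 8;
const TOKEN_TYPE_MASK = 0x3;

enum StandardTokenType { Other = 0, Comment = 1, String = 2, RegEx = 3 }

function packMetadata(languageId: number, tokenType: StandardTokenType): number {
    // `>>> 0` coerces the result to an unsigned 32-bit integer, matching the
    // Uint32Array the test's tokenizer fills.
    return ((languageId << LANGUAGEID_OFFSET) | (tokenType << TOKEN_TYPE_OFFSET)) >>> 0;
}

function languageIdOf(metadata: number): number {
    return (metadata >>> LANGUAGEID_OFFSET) & LANGUAGEID_MASK;
}

function tokenTypeOf(metadata: number): StandardTokenType {
    return (metadata >>> TOKEN_TYPE_OFFSET) & TOKEN_TYPE_MASK;
}

// mode1 and mode2 in the test use language ids 3 and 4, which is how the
// bracket matcher can tell the embedded language's braces apart.
const meta1 = packMetadata(3, StandardTokenType.Other);
const meta2 = packMetadata(4, StandardTokenType.Other);
console.log(languageIdOf(meta1), tokenTypeOf(meta1)); // 3 0
console.log(languageIdOf(meta2), tokenTypeOf(meta2)); // 4 0
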
@@ -8,13 +8,13 @@ import { MultilineTokens2, SparseEncodedTokens, TokensStore2 } from 'vs/editor/c
 import { Range } from 'vs/editor/common/core/range';
 import { TextModel } from 'vs/editor/common/model/textModel';
 import { IIdentifiedSingleEditOperation } from 'vs/editor/common/model';
-import { MetadataConsts, TokenMetadata, FontStyle } from 'vs/editor/common/modes';
+import { MetadataConsts, TokenMetadata, FontStyle, ColorId } from 'vs/editor/common/modes';
 import { createTextModel } from 'vs/editor/test/common/editorTestUtils';
 import { LineTokens } from 'vs/editor/common/core/lineTokens';

 suite('TokensStore', () => {

-    const SEMANTIC_COLOR = 5;
+    const SEMANTIC_COLOR: ColorId = 5;

     function parseTokensState(state: string[]): { text: string; tokens: MultilineTokens2; } {
         let text: string[] = [];