Merge from vscode c58aaab8a1cc22a7139b761166a0d4f37d41e998 (#7880)
* Merge from vscode c58aaab8a1cc22a7139b761166a0d4f37d41e998
* fix pipelines
* fix strict-null-checks
* add missing files
@@ -95,6 +95,20 @@ suite('CancellationToken', function () {
assert.equal(count, 0);
});

test('dispose calls no listeners (unless told to cancel)', function () {

let count = 0;

let source = new CancellationTokenSource();
source.token.onCancellationRequested(function () {
count += 1;
});

source.dispose(true);
// source.cancel();
assert.equal(count, 1);
});

test('parent cancels child', function () {

let parent = new CancellationTokenSource();
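The new test pins down the CancellationTokenSource.dispose(cancel?) contract: dispose(true) first cancels, so registered listeners fire exactly once, while a plain dispose() drops listeners without invoking them. Below is a minimal sketch of that contract, using only the surface the test touches; the class name SketchCancellationTokenSource and all of its internals are illustrative assumptions, not the actual vs/base/common/cancellation implementation.

// Sketch only: models the dispose(cancel?) behaviour the test above relies on.
type CancellationListener = () => void;

class SketchCancellationTokenSource {
    private listeners: CancellationListener[] = [];
    private cancelled = false;

    readonly token = {
        onCancellationRequested: (listener: CancellationListener): void => {
            this.listeners.push(listener);
        }
    };

    cancel(): void {
        if (!this.cancelled) {
            this.cancelled = true;
            this.listeners.forEach(listener => listener()); // listeners fire at most once
        }
    }

    dispose(cancel: boolean = false): void {
        if (cancel) {
            this.cancel(); // dispose(true) behaves like cancel() followed by cleanup
        }
        this.listeners = []; // a plain dispose() silently drops listeners
    }
}

// Mirrors the assertion above: dispose(true) bumps the counter to 1,
// whereas dispose() on its own would leave it at 0.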
@@ -4,14 +4,14 @@
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import { IMatch } from 'vs/base/common/filters';
import { matchesFuzzyOcticonAware, parseOcticons } from 'vs/base/common/octicon';
import { matchesFuzzyCodiconAware, parseCodicons, IParsedCodicons } from 'vs/base/common/codicon';

export interface IOcticonFilter {
export interface ICodiconFilter {
// Returns null if word doesn't match.
(query: string, target: { text: string, octiconOffsets?: number[] }): IMatch[] | null;
(query: string, target: IParsedCodicons): IMatch[] | null;
}

function filterOk(filter: IOcticonFilter, word: string, target: { text: string, octiconOffsets?: number[] }, highlights?: { start: number; end: number; }[]) {
function filterOk(filter: ICodiconFilter, word: string, target: IParsedCodicons, highlights?: { start: number; end: number; }[]) {
let r = filter(word, target);
assert(r);
if (highlights) {
@@ -19,24 +19,24 @@ function filterOk(filter: IOcticonFilter, word: string, target: { text: string,
}
}

suite('Octicon', () => {
test('matchesFuzzzyOcticonAware', () => {
suite('Codicon', () => {
test('matchesFuzzzyCodiconAware', () => {

// Camel Case

filterOk(matchesFuzzyOcticonAware, 'ccr', parseOcticons('$(octicon)CamelCaseRocks$(octicon)'), [
filterOk(matchesFuzzyCodiconAware, 'ccr', parseCodicons('$(codicon)CamelCaseRocks$(codicon)'), [
{ start: 10, end: 11 },
{ start: 15, end: 16 },
{ start: 19, end: 20 }
]);

filterOk(matchesFuzzyOcticonAware, 'ccr', parseOcticons('$(octicon) CamelCaseRocks $(octicon)'), [
filterOk(matchesFuzzyCodiconAware, 'ccr', parseCodicons('$(codicon) CamelCaseRocks $(codicon)'), [
{ start: 11, end: 12 },
{ start: 16, end: 17 },
{ start: 20, end: 21 }
]);

filterOk(matchesFuzzyOcticonAware, 'iut', parseOcticons('$(octicon) Indent $(octico) Using $(octic) Tpaces'), [
filterOk(matchesFuzzyCodiconAware, 'iut', parseCodicons('$(codicon) Indent $(octico) Using $(octic) Tpaces'), [
{ start: 11, end: 12 },
{ start: 28, end: 29 },
{ start: 43, end: 44 },
@@ -44,22 +44,22 @@ suite('Octicon', () => {

// Prefix

filterOk(matchesFuzzyOcticonAware, 'using', parseOcticons('$(octicon) Indent Using Spaces'), [
filterOk(matchesFuzzyCodiconAware, 'using', parseCodicons('$(codicon) Indent Using Spaces'), [
{ start: 18, end: 23 },
]);

// Broken Octicon
// Broken Codicon

filterOk(matchesFuzzyOcticonAware, 'octicon', parseOcticons('This $(octicon Indent Using Spaces'), [
filterOk(matchesFuzzyCodiconAware, 'codicon', parseCodicons('This $(codicon Indent Using Spaces'), [
{ start: 7, end: 14 },
]);

filterOk(matchesFuzzyOcticonAware, 'indent', parseOcticons('This $octicon Indent Using Spaces'), [
filterOk(matchesFuzzyCodiconAware, 'indent', parseCodicons('This $codicon Indent Using Spaces'), [
{ start: 14, end: 20 },
]);

// Testing #59343
filterOk(matchesFuzzyOcticonAware, 'unt', parseOcticons('$(primitive-dot) $(file-text) Untitled-1'), [
filterOk(matchesFuzzyCodiconAware, 'unt', parseCodicons('$(primitive-dot) $(file-text) Untitled-1'), [
{ start: 30, end: 33 },
]);
});
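Alongside the octicon to codicon rename, the filter target changes from an inline { text, octiconOffsets? } object to an IParsedCodicons value produced by parseCodicons. Presumably the parsed type carries the same two pieces of information under codicon naming; the interface below (IParsedCodiconsSketch) is that assumption written out, not the actual declaration in vs/base/common/codicon.

// Assumed shape only; the real IParsedCodicons may differ.
interface IParsedCodiconsSketch {
    readonly text: string;              // the target text to match against
    readonly codiconOffsets?: number[]; // offsets used to map matches back past the $(codicon) syntax
}

// Usage as in the updated tests:
//   const target = parseCodicons('$(codicon) Indent Using Spaces');
//   matchesFuzzyCodiconAware('using', target); // -> [{ start: 18, end: 23 }]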
@@ -196,7 +196,6 @@ suite('Color', () => {
test('parseHex', () => {

// invalid
assert.deepEqual(Color.Format.CSS.parseHex(null!), null);
assert.deepEqual(Color.Format.CSS.parseHex(''), null);
assert.deepEqual(Color.Format.CSS.parseHex('#'), null);
assert.deepEqual(Color.Format.CSS.parseHex('#0102030'), null);
@@ -243,4 +242,4 @@ suite('Color', () => {
});
});
});
});
});
@@ -4,21 +4,7 @@
*--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import { LcsDiff, IDiffChange, ISequence } from 'vs/base/common/diff/diff';

class StringDiffSequence implements ISequence {

constructor(private source: string) {
}

getLength() {
return this.source.length;
}

getElementAtIndex(i: number) {
return this.source.charCodeAt(i);
}
}
import { LcsDiff, IDiffChange, StringDiffSequence } from 'vs/base/common/diff/diff';

function createArray<T>(length: number, value: T): T[] {
const r: T[] = [];
@@ -71,9 +57,9 @@ function assertAnswer(originalStr: string, modifiedStr: string, changes: IDiffCh
}
}

function lcsInnerTest(Algorithm: any, originalStr: string, modifiedStr: string, answerStr: string, onlyLength: boolean = false): void {
let diff = new Algorithm(new StringDiffSequence(originalStr), new StringDiffSequence(modifiedStr));
let changes = diff.ComputeDiff();
function lcsInnerTest(originalStr: string, modifiedStr: string, answerStr: string, onlyLength: boolean = false): void {
let diff = new LcsDiff(new StringDiffSequence(originalStr), new StringDiffSequence(modifiedStr));
let changes = diff.ComputeDiff(false).changes;
assertAnswer(originalStr, modifiedStr, changes, answerStr, onlyLength);
}

@@ -85,32 +71,28 @@ function stringPower(str: string, power: number): string {
return r;
}

function lcsTest(Algorithm: any, originalStr: string, modifiedStr: string, answerStr: string) {
lcsInnerTest(Algorithm, originalStr, modifiedStr, answerStr);
function lcsTest(originalStr: string, modifiedStr: string, answerStr: string) {
lcsInnerTest(originalStr, modifiedStr, answerStr);
for (let i = 2; i <= 5; i++) {
lcsInnerTest(Algorithm, stringPower(originalStr, i), stringPower(modifiedStr, i), stringPower(answerStr, i), true);
lcsInnerTest(stringPower(originalStr, i), stringPower(modifiedStr, i), stringPower(answerStr, i), true);
}
}

function lcsTests(Algorithm: any) {
lcsTest(Algorithm, 'heLLo world', 'hello orlando', 'heo orld');
lcsTest(Algorithm, 'abcde', 'acd', 'acd'); // simple
lcsTest(Algorithm, 'abcdbce', 'bcede', 'bcde'); // skip
lcsTest(Algorithm, 'abcdefgabcdefg', 'bcehafg', 'bceafg'); // long
lcsTest(Algorithm, 'abcde', 'fgh', ''); // no match
lcsTest(Algorithm, 'abcfabc', 'fabc', 'fabc');
lcsTest(Algorithm, '0azby0', '9axbzby9', 'azby');
lcsTest(Algorithm, '0abc00000', '9a1b2c399999', 'abc');

lcsTest(Algorithm, 'fooBar', 'myfooBar', 'fooBar'); // all insertions
lcsTest(Algorithm, 'fooBar', 'fooMyBar', 'fooBar'); // all insertions
lcsTest(Algorithm, 'fooBar', 'fooBar', 'fooBar'); // identical sequences
}

suite('Diff', () => {
test('LcsDiff - different strings tests', function () {
this.timeout(10000);
lcsTests(LcsDiff);
lcsTest('heLLo world', 'hello orlando', 'heo orld');
lcsTest('abcde', 'acd', 'acd'); // simple
lcsTest('abcdbce', 'bcede', 'bcde'); // skip
lcsTest('abcdefgabcdefg', 'bcehafg', 'bceafg'); // long
lcsTest('abcde', 'fgh', ''); // no match
lcsTest('abcfabc', 'fabc', 'fabc');
lcsTest('0azby0', '9axbzby9', 'azby');
lcsTest('0abc00000', '9a1b2c399999', 'abc');

lcsTest('fooBar', 'myfooBar', 'fooBar'); // all insertions
lcsTest('fooBar', 'fooMyBar', 'fooBar'); // all insertions
lcsTest('fooBar', 'fooBar', 'fooBar'); // identical sequences
});
});

@@ -123,18 +105,17 @@ suite('Diff - Ported from VS', () => {
// doesn't get there first.
let predicateCallCount = 0;

let diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, leftSequence, longestMatchSoFar) {
let diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, longestMatchSoFar) {
assert.equal(predicateCallCount, 0);

predicateCallCount++;

assert.equal(leftSequence.getLength(), left.length);
assert.equal(leftIndex, 1);

// cancel processing
return false;
});
let changes = diff.ComputeDiff(true);
let changes = diff.ComputeDiff(true).changes;

assert.equal(predicateCallCount, 1);

@@ -144,26 +125,26 @@ suite('Diff - Ported from VS', () => {

// Cancel after the first match ('c')
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, leftSequence, longestMatchSoFar) {
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, longestMatchSoFar) {
assert(longestMatchSoFar <= 1); // We never see a match of length > 1

// Continue processing as long as there hasn't been a match made.
return longestMatchSoFar < 1;
});
changes = diff.ComputeDiff(true);
changes = diff.ComputeDiff(true).changes;

assertAnswer(left, right, changes, 'abcf');

// Cancel after the second match ('d')
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, leftSequence, longestMatchSoFar) {
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, longestMatchSoFar) {
assert(longestMatchSoFar <= 2); // We never see a match of length > 2

// Continue processing as long as there hasn't been a match made.
return longestMatchSoFar < 2;
});
changes = diff.ComputeDiff(true);
changes = diff.ComputeDiff(true).changes;

assertAnswer(left, right, changes, 'abcdf');

@@ -171,7 +152,7 @@ suite('Diff - Ported from VS', () => {

// Cancel *one iteration* after the second match ('d')
let hitSecondMatch = false;
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, leftSequence, longestMatchSoFar) {
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, longestMatchSoFar) {
assert(longestMatchSoFar <= 2); // We never see a match of length > 2

let hitYet = hitSecondMatch;
@@ -179,20 +160,20 @@ suite('Diff - Ported from VS', () => {
// Continue processing as long as there hasn't been a match made.
return !hitYet;
});
changes = diff.ComputeDiff(true);
changes = diff.ComputeDiff(true).changes;

assertAnswer(left, right, changes, 'abcdf');

// Cancel after the third and final match ('e')
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, leftSequence, longestMatchSoFar) {
diff = new LcsDiff(new StringDiffSequence(left), new StringDiffSequence(right), function (leftIndex, longestMatchSoFar) {
assert(longestMatchSoFar <= 3); // We never see a match of length > 3

// Continue processing as long as there hasn't been a match made.
return longestMatchSoFar < 3;
});
changes = diff.ComputeDiff(true);
changes = diff.ComputeDiff(true).changes;

assertAnswer(left, right, changes, 'abcdef');
});
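Two mechanical changes run through the diff tests above: StringDiffSequence is now imported from vs/base/common/diff/diff rather than declared locally, and ComputeDiff(pretty) returns an object whose changes property holds the IDiffChange array (the continue-processing callback also loses its leftSequence parameter). The helper below, computeStringDiff, is a hypothetical wrapper showing that calling pattern; it is not part of the test file, and any result fields beyond changes are not shown because the diff does not name them.

import { LcsDiff, IDiffChange, StringDiffSequence } from 'vs/base/common/diff/diff';

// Before the merge: let changes = diff.ComputeDiff();              (an IDiffChange[])
// After the merge:  let changes = diff.ComputeDiff(pretty).changes;
function computeStringDiff(original: string, modified: string, pretty = false): IDiffChange[] {
    const diff = new LcsDiff(new StringDiffSequence(original), new StringDiffSequence(modified));
    return diff.ComputeDiff(pretty).changes;
}

// The optional third LcsDiff argument remains a continue-processing predicate,
// now invoked as (leftIndex, longestMatchSoFar), as the updated tests show.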
src/vs/base/test/common/lazy.test.ts (new file, 64 lines)
@@ -0,0 +1,64 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import { Lazy } from 'vs/base/common/lazy';

suite('Lazy', () => {

test('lazy values should only be resolved once', () => {
let counter = 0;
const value = new Lazy(() => ++counter);

assert.strictEqual(value.hasValue(), false);
assert.strictEqual(value.getValue(), 1);
assert.strictEqual(value.hasValue(), true);
assert.strictEqual(value.getValue(), 1); // make sure we did not evaluate again
});

test('lazy values handle error case', () => {
let counter = 0;
const value = new Lazy(() => { throw new Error(`${++counter}`); });

assert.strictEqual(value.hasValue(), false);
assert.throws(() => value.getValue(), /\b1\b/);
assert.strictEqual(value.hasValue(), true);
assert.throws(() => value.getValue(), /\b1\b/);
});

test('map should not cause lazy values to be re-resolved', () => {
let outer = 0;
let inner = 10;
const outerLazy = new Lazy(() => ++outer);
const innerLazy = outerLazy.map(x => [x, ++inner]);

assert.strictEqual(outerLazy.hasValue(), false);
assert.strictEqual(innerLazy.hasValue(), false);

assert.deepEqual(innerLazy.getValue(), [1, 11]);
assert.strictEqual(outerLazy.hasValue(), true);
assert.strictEqual(innerLazy.hasValue(), true);
assert.strictEqual(outerLazy.getValue(), 1);

// make sure we did not evaluate again
assert.strictEqual(outerLazy.getValue(), 1);
assert.deepEqual(innerLazy.getValue(), [1, 11]);
});

test('map should should handle error values', () => {
let outer = 0;
let inner = 10;
const outerLazy = new Lazy(() => { throw new Error(`${++outer}`); });
const innerLazy = outerLazy.map(x => { throw new Error(`${++inner}`); });

assert.strictEqual(outerLazy.hasValue(), false);
assert.strictEqual(innerLazy.hasValue(), false);

assert.throws(() => innerLazy.getValue(), /\b1\b/); // we should get result from outer
assert.strictEqual(outerLazy.hasValue(), true);
assert.strictEqual(innerLazy.hasValue(), true);
assert.throws(() => outerLazy.getValue(), /\b1\b/);
});
});
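The new lazy.test.ts pins down a small Lazy<T> surface: hasValue(), getValue() that runs the factory at most once and re-throws a cached error on every call, and map() that derives a new lazy value without re-running the outer factory. LazySketch below is a minimal implementation consistent with those tests; it is an assumption about vs/base/common/lazy, not a copy of it.

// Sketch only: just enough to satisfy the assertions in the test file above.
class LazySketch<T> {
    private didRun = false;
    private value: T | undefined;
    private error: unknown;

    constructor(private readonly executor: () => T) { }

    hasValue(): boolean {
        return this.didRun; // true once the factory has run, even if it threw
    }

    getValue(): T {
        if (!this.didRun) {
            try {
                this.value = this.executor();
            } catch (err) {
                this.error = err; // cache the error so every later call re-throws the same one
            } finally {
                this.didRun = true;
            }
        }
        if (this.error !== undefined) {
            throw this.error;
        }
        return this.value!;
    }

    // Deriving a value stays lazy; the outer factory still runs at most once.
    map<R>(fn: (value: T) => R): LazySketch<R> {
        return new LazySketch<R>(() => fn(this.getValue()));
    }
}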
@@ -4,46 +4,70 @@
*--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import { ResourceTree, IBranchNode, ILeafNode } from 'vs/base/common/resourceTree';
import { ResourceTree } from 'vs/base/common/resourceTree';
import { URI } from 'vs/base/common/uri';

suite('ResourceTree', function () {
test('ctor', function () {
const tree = new ResourceTree<string, null>(null);
assert(ResourceTree.isBranchNode(tree.root));
assert.equal(tree.root.size, 0);
assert.equal(tree.root.childrenCount, 0);
});

test('simple', function () {
const tree = new ResourceTree<string, null>(null);

tree.add(URI.file('/foo/bar.txt'), 'bar contents');
assert(ResourceTree.isBranchNode(tree.root));
assert.equal(tree.root.size, 1);
assert.equal(tree.root.childrenCount, 1);

let foo = tree.root.get('foo') as IBranchNode<string, null>;
let foo = tree.root.get('foo')!;
assert(foo);
assert(ResourceTree.isBranchNode(foo));
assert.equal(foo.size, 1);
assert.equal(foo.childrenCount, 1);

let bar = foo.get('bar.txt') as ILeafNode<string, null>;
let bar = foo.get('bar.txt')!;
assert(bar);
assert(!ResourceTree.isBranchNode(bar));
assert.equal(bar.element, 'bar contents');

tree.add(URI.file('/hello.txt'), 'hello contents');
assert.equal(tree.root.size, 2);
assert.equal(tree.root.childrenCount, 2);

let hello = tree.root.get('hello.txt') as ILeafNode<string, null>;
let hello = tree.root.get('hello.txt')!;
assert(hello);
assert(!ResourceTree.isBranchNode(hello));
assert.equal(hello.element, 'hello contents');

tree.delete(URI.file('/foo/bar.txt'));
assert.equal(tree.root.size, 1);
hello = tree.root.get('hello.txt') as ILeafNode<string, null>;
assert.equal(tree.root.childrenCount, 1);
hello = tree.root.get('hello.txt')!;
assert(hello);
assert(!ResourceTree.isBranchNode(hello));
assert.equal(hello.element, 'hello contents');
});

test('folders with data', function () {
const tree = new ResourceTree<string, null>(null);

assert.equal(tree.root.childrenCount, 0);

tree.add(URI.file('/foo'), 'foo');
assert.equal(tree.root.childrenCount, 1);
assert.equal(tree.root.get('foo')!.element, 'foo');

tree.add(URI.file('/bar'), 'bar');
assert.equal(tree.root.childrenCount, 2);
assert.equal(tree.root.get('bar')!.element, 'bar');

tree.add(URI.file('/foo/file.txt'), 'file');
assert.equal(tree.root.childrenCount, 2);
assert.equal(tree.root.get('foo')!.element, 'foo');
assert.equal(tree.root.get('bar')!.element, 'bar');
assert.equal(tree.root.get('foo')!.get('file.txt')!.element, 'file');

tree.delete(URI.file('/foo'));
assert.equal(tree.root.childrenCount, 1);
assert(!tree.root.get('foo'));
assert.equal(tree.root.get('bar')!.element, 'bar');

tree.delete(URI.file('/bar'));
assert.equal(tree.root.childrenCount, 0);
assert(!tree.root.get('foo'));
assert(!tree.root.get('bar'));
});
});
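The updated resource-tree assertions drop the IBranchNode/ILeafNode casts and the size property in favour of a childrenCount property and a get() that may return undefined (hence the non-null assertions). The interface below, IResourceNodeSketch, spells out only the node surface those assertions touch; it is an assumption for illustration, not the declaration in vs/base/common/resourceTree.

// Assumed node surface only; the real types may differ.
interface IResourceNodeSketch<T> {
    readonly element: T | undefined;                       // file contents, or folder data when provided
    readonly childrenCount: number;                        // replaces the old size property
    get(name: string): IResourceNodeSketch<T> | undefined; // hence tree.root.get('foo')! in the tests
}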
@@ -169,6 +169,24 @@ suite('Types', () => {
assert(types.isUndefinedOrNull(null));
});

test('assertIsDefined / assertAreDefined', () => {
assert.throws(() => types.assertIsDefined(undefined));
assert.throws(() => types.assertIsDefined(null));
assert.throws(() => types.assertAllDefined(null, undefined));
assert.throws(() => types.assertAllDefined(true, undefined));
assert.throws(() => types.assertAllDefined(undefined, false));

assert.equal(types.assertIsDefined(true), true);
assert.equal(types.assertIsDefined(false), false);
assert.equal(types.assertIsDefined('Hello'), 'Hello');
assert.equal(types.assertIsDefined(''), '');

const res = types.assertAllDefined(1, true, 'Hello');
assert.equal(res[0], 1);
assert.equal(res[1], true);
assert.equal(res[2], 'Hello');
});

test('validateConstraints', () => {
types.validateConstraints([1, 'test', true], [Number, String, Boolean]);
types.validateConstraints([1, 'test', true], ['number', 'string', 'boolean']);
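The new assertions describe two narrowing helpers: assertIsDefined throws on undefined or null and otherwise returns its argument unchanged (falsy values such as false and '' pass), and assertAllDefined does the same for several values at once, returning them in order. The functions below are minimal sketches consistent with those assertions; they carry a Sketch suffix because the real signatures and error messages in vs/base/common/types may differ.

// Sketches only: behaviour inferred from the assertions above.
function assertIsDefinedSketch<T>(value: T | undefined | null): T {
    if (value === undefined || value === null) {
        throw new Error('Assertion failed: value is undefined or null');
    }
    return value;
}

function assertAllDefinedSketch(...values: unknown[]): unknown[] {
    return values.map(value => assertIsDefinedSketch(value));
}

// assertIsDefinedSketch(false) === false, assertIsDefinedSketch('') === ''
// assertAllDefinedSketch(1, true, 'Hello') -> [1, true, 'Hello']; any null or undefined entry throws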
@@ -439,6 +439,37 @@ suite('URI', () => {
assert.equal(uri.path, uri2.path);
});

test('Links in markdown are broken if url contains encoded parameters #79474', function () {
this.skip();
let strIn = 'https://myhost.com/Redirect?url=http%3A%2F%2Fwww.bing.com%3Fsearch%3Dtom';
let uri1 = URI.parse(strIn);
let strOut = uri1.toString();
let uri2 = URI.parse(strOut);

assert.equal(uri1.scheme, uri2.scheme);
assert.equal(uri1.authority, uri2.authority);
assert.equal(uri1.path, uri2.path);
assert.equal(uri1.query, uri2.query);
assert.equal(uri1.fragment, uri2.fragment);
assert.equal(strIn, strOut); // fails here!!
});

test('Uri#parse can break path-component #45515', function () {
this.skip();
let strIn = 'https://firebasestorage.googleapis.com/v0/b/brewlangerie.appspot.com/o/products%2FzVNZkudXJyq8bPGTXUxx%2FBetterave-Sesame.jpg?alt=media&token=0b2310c4-3ea6-4207-bbde-9c3710ba0437';
let uri1 = URI.parse(strIn);
let strOut = uri1.toString();
let uri2 = URI.parse(strOut);

assert.equal(uri1.scheme, uri2.scheme);
assert.equal(uri1.authority, uri2.authority);
assert.equal(uri1.path, uri2.path);
assert.equal(uri1.query, uri2.query);
assert.equal(uri1.fragment, uri2.fragment);
assert.equal(strIn, strOut); // fails here!!
});

test('URI - (de)serialize', function () {

const values = [
@@ -16,6 +16,7 @@ import { CancellationTokenSource } from 'vs/base/common/cancellation';
import { isWindows, isLinux } from 'vs/base/common/platform';
import { canNormalize } from 'vs/base/common/normalization';
import { VSBuffer } from 'vs/base/common/buffer';
import { join } from 'path';

const chunkSize = 64 * 1024;
const readError = 'Error while reading';
@@ -386,6 +387,31 @@ suite('PFS', () => {
}
});

test('readdirWithFileTypes', async () => {
if (canNormalize && typeof process.versions['electron'] !== 'undefined' /* needs electron */) {
const id = uuid.generateUuid();
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
const testDir = join(parentDir, 'pfs', id);

const newDir = path.join(testDir, 'öäü');
await pfs.mkdirp(newDir, 493);

await pfs.writeFile(join(testDir, 'somefile.txt'), 'contents');

assert.ok(fs.existsSync(newDir));

const children = await pfs.readdirWithFileTypes(testDir);

assert.equal(children.some(n => n.name === 'öäü'), true); // Mac always converts to NFD, so
assert.equal(children.some(n => n.isDirectory()), true);

assert.equal(children.some(n => n.name === 'somefile.txt'), true);
assert.equal(children.some(n => n.isFile()), true);

await pfs.rimraf(parentDir);
}
});

test('writeFile (string)', async () => {
const smallData = 'Hello World';
const bigData = (new Array(100 * 1024)).join('Large String\n');
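The new readdirWithFileTypes test only needs entries that expose name, isDirectory() and isFile(), which is exactly what Node's fs.readdir returns when called with the withFileTypes option. Presumably the pfs helper is a thin promise wrapper over that call; readdirWithFileTypesSketch below is that assumption written out, not the actual code in pfs.ts.

import { promises as fsPromises, Dirent } from 'fs';

// Assumed implementation: delegate to fs.promises.readdir with withFileTypes,
// which yields Dirent objects carrying the name / isDirectory() / isFile()
// surface the test asserts on.
export function readdirWithFileTypesSketch(path: string): Promise<Dirent[]> {
    return fsPromises.readdir(path, { withFileTypes: true });
}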