Merge from vscode a5cf1da01d5db3d2557132be8d30f89c38019f6c (#8525)

* Merge from vscode a5cf1da01d5db3d2557132be8d30f89c38019f6c

* remove files we don't want

* fix hygiene

* update distro

* update distro

* fix hygiene

* fix strict nulls

* distro

* distro

* fix tests

* fix tests

* add another edit

* fix viewlet icon

* fix azure dialog

* fix some padding

* fix more padding issues
Author: Anthony Dresser
Date: 2019-12-04 19:28:22 -08:00
Committed by: GitHub
Parent: a8818ab0df
Commit: f5ce7fb2a5
1507 changed files with 42813 additions and 27370 deletions


@@ -8,7 +8,7 @@ import { Emitter } from 'vs/base/common/event';
import { SplitView, IView, Sizing, LayoutPriority } from 'vs/base/browser/ui/splitview/splitview';
import { Sash, SashState } from 'vs/base/browser/ui/sash/sash';
class TestView implements IView {
class TestView implements IView<number> {
private readonly _onDidChange = new Emitter<number | undefined>();
readonly onDidChange = this._onDidChange.event;
@@ -43,7 +43,7 @@ class TestView implements IView {
assert(_minimumSize <= _maximumSize, 'splitview view minimum size must be <= maximum size');
}
layout(size: number, orthogonalSize: number | undefined): void {
layout(size: number, _offset: number, orthogonalSize: number | undefined): void {
this._size = size;
this._orthogonalSize = orthogonalSize;
this._onDidLayout.fire({ size, orthogonalSize });
@@ -527,11 +527,11 @@ suite('Splitview', () => {
view1.dispose();
});
test('orthogonal size propagates to views', () => {
test('context propagates to views', () => {
const view1 = new TestView(20, Number.POSITIVE_INFINITY);
const view2 = new TestView(20, Number.POSITIVE_INFINITY);
const view3 = new TestView(20, Number.POSITIVE_INFINITY, LayoutPriority.Low);
const splitview = new SplitView(container, { proportionalLayout: false });
const splitview = new SplitView<number>(container, { proportionalLayout: false });
splitview.layout(200);
splitview.addView(view1, Sizing.Distribute);
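
The splitview hunks above track an upstream API change: IView and SplitView now carry a layout-context type parameter, and layout() receives the view's offset before that context. A minimal sketch of a view written against the shape these tests imply (the authoritative interface lives in vs/base/browser/ui/splitview/splitview.ts; SimpleView and its sizes are hypothetical):

import { Emitter, Event } from 'vs/base/common/event';
import { IView, LayoutPriority } from 'vs/base/browser/ui/splitview/splitview';

class SimpleView implements IView<number> {
	readonly element = document.createElement('div');
	readonly minimumSize = 20;
	readonly maximumSize = Number.POSITIVE_INFINITY;
	readonly priority = LayoutPriority.Normal;

	private readonly _onDidChange = new Emitter<number | undefined>();
	readonly onDidChange: Event<number | undefined> = this._onDidChange.event;

	layout(size: number, _offset: number, context: number | undefined): void {
		// size: the view's length along the splitview axis
		// _offset: where the view starts, newly passed in this API
		// context: whatever SplitView<number> propagates, e.g. the orthogonal size
		this.element.style.height = `${size}px`;
	}
}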


@@ -12,19 +12,28 @@ import { timeout } from 'vs/base/common/async';
interface Element {
id: string;
suffix?: string;
children?: Element[];
}
function find(elements: Element[] | undefined, id: string): Element {
while (elements) {
for (const element of elements) {
if (element.id === id) {
return element;
}
function find(element: Element, id: string): Element | undefined {
if (element.id === id) {
return element;
}
if (!element.children) {
return undefined;
}
for (const child of element.children) {
const result = find(child, id);
if (result) {
return result;
}
}
throw new Error('element not found');
return undefined;
}
class Renderer implements ITreeRenderer<Element, void, HTMLElement> {
@@ -33,7 +42,7 @@ class Renderer implements ITreeRenderer<Element, void, HTMLElement> {
return container;
}
renderElement(element: ITreeNode<Element, void>, index: number, templateData: HTMLElement): void {
templateData.textContent = element.element.id;
templateData.textContent = element.element.id + (element.element.suffix || '');
}
disposeTemplate(templateData: HTMLElement): void {
// noop
@@ -65,7 +74,13 @@ class Model {
constructor(readonly root: Element) { }
get(id: string): Element {
return find(this.root.children, id);
const result = find(this.root, id);
if (!result) {
throw new Error('element not found');
}
return result;
}
}
@@ -389,4 +404,36 @@ suite('AsyncDataTree', function () {
assert(!hasClass(twistie, 'collapsible'));
assert(!hasClass(twistie, 'collapsed'));
});
test('issues #84569, #82629 - rerender', async () => {
const container = document.createElement('div');
const model = new Model({
id: 'root',
children: [{
id: 'a',
children: [{
id: 'b',
suffix: '1'
}]
}]
});
const tree = new AsyncDataTree<Element, Element>('test', container, new VirtualDelegate(), [new Renderer()], new DataSource(), { identityProvider: new IdentityProvider() });
tree.layout(200);
await tree.setInput(model.root);
await tree.expand(model.get('a'));
assert.deepEqual(Array.from(container.querySelectorAll('.monaco-list-row')).map(e => e.textContent), ['a', 'b1']);
const a = model.get('a');
const b = model.get('b');
a.children?.splice(0, 1, { id: 'b', suffix: '2' });
await Promise.all([
tree.updateChildren(a, true, true),
tree.updateChildren(b, true, true)
]);
assert.deepEqual(Array.from(container.querySelectorAll('.monaco-list-row')).map(e => e.textContent), ['a', 'b2']);
});
});


@@ -724,4 +724,35 @@ suite('IndexTreeModel', function () {
model.refilter();
assert.deepEqual(toArray(list), ['platinum']);
});
test('explicit hidden nodes should have renderNodeCount == 0, issue #83211', function () {
const list: ITreeNode<string>[] = [];
let query = new RegExp('');
const filter = new class implements ITreeFilter<string> {
filter(element: string): boolean {
return query.test(element);
}
};
const model = new IndexTreeModel<string>('test', toSpliceable(list), 'root', { filter });
model.splice([0], 0, [
{ element: 'a', children: [{ element: 'aa' }] },
{ element: 'b', children: [{ element: 'bb' }] }
]);
assert.deepEqual(toArray(list), ['a', 'aa', 'b', 'bb']);
assert.deepEqual(model.getListIndex([0]), 0);
assert.deepEqual(model.getListIndex([0, 0]), 1);
assert.deepEqual(model.getListIndex([1]), 2);
assert.deepEqual(model.getListIndex([1, 0]), 3);
query = /b/;
model.refilter();
assert.deepEqual(toArray(list), ['b', 'bb']);
assert.deepEqual(model.getListIndex([0]), -1);
assert.deepEqual(model.getListIndex([0, 0]), -1);
assert.deepEqual(model.getListIndex([1]), 0);
assert.deepEqual(model.getListIndex([1, 0]), 1);
});
});


@@ -348,8 +348,6 @@ suite('CompressibleObjectTree', function () {
const tree = new CompressibleObjectTree<number>('test', container, new Delegate(), [new Renderer()]);
tree.layout(200);
assert.equal(tree.isCompressionEnabled(), true);
tree.setChildren(null, Iterator.fromArray([
{
element: 1, children: Iterator.fromArray([{
@@ -367,11 +365,11 @@ suite('CompressibleObjectTree', function () {
let rows = toArray(container.querySelectorAll('.monaco-tl-contents')).map(row => row.textContent);
assert.deepEqual(rows, ['1/11/111', '1111', '1112', '1113']);
tree.setCompressionEnabled(false);
tree.updateOptions({ compressionEnabled: false });
rows = toArray(container.querySelectorAll('.monaco-tl-contents')).map(row => row.textContent);
assert.deepEqual(rows, ['1', '11', '111', '1111', '1112', '1113']);
tree.setCompressionEnabled(true);
tree.updateOptions({ compressionEnabled: true });
rows = toArray(container.querySelectorAll('.monaco-tl-contents')).map(row => row.textContent);
assert.deepEqual(rows, ['1/11/111', '1111', '1112', '1113']);
});


@@ -3,10 +3,11 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import { Event, Emitter, EventBufferer, EventMultiplexer, AsyncEmitter, IWaitUntil, PauseableEmitter } from 'vs/base/common/event';
import { Event, Emitter, EventBufferer, EventMultiplexer, IWaitUntil, PauseableEmitter, AsyncEmitter } from 'vs/base/common/event';
import { IDisposable, DisposableStore } from 'vs/base/common/lifecycle';
import * as Errors from 'vs/base/common/errors';
import { timeout } from 'vs/base/common/async';
import { CancellationToken } from 'vs/base/common/cancellation';
namespace Samples {
@@ -174,7 +175,7 @@ suite('Event', function () {
test('Debounce Event', function (done: () => void) {
let doc = new Samples.Document3();
let onDocDidChange = Event.debounce(doc.onDidChange, (prev: string[], cur) => {
let onDocDidChange = Event.debounce(doc.onDidChange, (prev: string[] | undefined, cur) => {
if (!prev) {
prev = [cur];
} else if (prev.indexOf(cur) < 0) {
@@ -272,11 +273,7 @@ suite('AsyncEmitter', function () {
assert.equal(typeof e.waitUntil, 'function');
});
emitter.fireAsync(thenables => ({
foo: true,
bar: 1,
waitUntil(t: Promise<void>) { thenables.push(t); }
}));
emitter.fireAsync({ foo: true, bar: 1, }, CancellationToken.None);
emitter.dispose();
});
@@ -303,12 +300,7 @@ suite('AsyncEmitter', function () {
}));
});
await emitter.fireAsync(thenables => ({
foo: true,
waitUntil(t) {
thenables.push(t);
}
}));
await emitter.fireAsync({ foo: true }, CancellationToken.None);
assert.equal(globalState, 2);
});
@@ -324,12 +316,7 @@ suite('AsyncEmitter', function () {
emitter.event(e => {
e.waitUntil(timeout(10).then(async _ => {
if (e.foo === 1) {
await emitter.fireAsync(thenables => ({
foo: 2,
waitUntil(t) {
thenables.push(t);
}
}));
await emitter.fireAsync({ foo: 2 }, CancellationToken.None);
assert.deepEqual(events, [1, 2]);
done = true;
}
@@ -342,14 +329,40 @@ suite('AsyncEmitter', function () {
e.waitUntil(timeout(7));
});
await emitter.fireAsync(thenables => ({
foo: 1,
waitUntil(t) {
thenables.push(t);
}
}));
await emitter.fireAsync({ foo: 1 }, CancellationToken.None);
assert.ok(done);
});
test('catch errors', async function () {
const origErrorHandler = Errors.errorHandler.getUnexpectedErrorHandler();
Errors.setUnexpectedErrorHandler(() => null);
interface E extends IWaitUntil {
foo: boolean;
}
let globalState = 0;
let emitter = new AsyncEmitter<E>();
emitter.event(e => {
globalState += 1;
e.waitUntil(new Promise((_r, reject) => reject(new Error())));
});
emitter.event(e => {
globalState += 1;
e.waitUntil(timeout(10));
});
await emitter.fireAsync({ foo: true }, CancellationToken.None).then(() => {
assert.equal(globalState, 2);
}).catch(e => {
console.log(e);
assert.ok(false);
});
Errors.setUnexpectedErrorHandler(origErrorHandler);
});
});
suite('PausableEmitter', function () {
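
The AsyncEmitter hunks above replace the factory-callback form of fireAsync with a plain payload plus a CancellationToken; the emitter itself now supplies waitUntil on the event it hands to listeners. A minimal sketch of the new calling shape, inferred from the rewritten tests (SaveEvent, its path field, and example() are hypothetical):

import { AsyncEmitter, IWaitUntil } from 'vs/base/common/event';
import { CancellationToken } from 'vs/base/common/cancellation';
import { timeout } from 'vs/base/common/async';

interface SaveEvent extends IWaitUntil {
	readonly path: string; // hypothetical payload field
}

async function example(): Promise<void> {
	const emitter = new AsyncEmitter<SaveEvent>();

	emitter.event(e => {
		// listeners still extend the event's lifetime through waitUntil
		e.waitUntil(timeout(10));
	});

	// callers pass the payload without waitUntil, plus a cancellation token;
	// fireAsync resolves once all registered thenables have settled
	await emitter.fireAsync({ path: '/tmp/file.txt' }, CancellationToken.None);

	emitter.dispose();
}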


@@ -498,4 +498,14 @@ suite('Filters', () => {
fuzzyScore
);
});
test('"Go to Symbol" with the exact method name doesn\'t work as expected #84787', function () {
const match = fuzzyScore(':get', ':get', 1, 'get', 'get', 0, true);
assert.ok(Boolean(match));
});
test('Suggestion is not highlighted #85826', function () {
assertMatches('SemanticTokens', 'SemanticTokensEdits', '^S^e^m^a^n^t^i^c^T^o^k^e^n^sEdits', fuzzyScore);
assertMatches('SemanticTokens', 'SemanticTokensEdits', '^S^e^m^a^n^t^i^c^T^o^k^e^n^sEdits', fuzzyScoreGracefulAggressive);
});
});


@@ -118,13 +118,43 @@ suite('JSON - edits', () => {
assertEdit(content, edits, '{\n "x": "y"\n}');
});
test('insert item to empty array', () => {
test('insert item at 0', () => {
let content = '[\n 2,\n 3\n]';
let edits = setProperty(content, [0], 1, formatterOptions);
assertEdit(content, edits, '[\n 1,\n 2,\n 3\n]');
});
test('insert item at 0 in empty array', () => {
let content = '[\n]';
let edits = setProperty(content, [0], 1, formatterOptions);
assertEdit(content, edits, '[\n 1\n]');
});
test('insert item at an index', () => {
let content = '[\n 1,\n 3\n]';
let edits = setProperty(content, [1], 2, formatterOptions);
assertEdit(content, edits, '[\n 1,\n 2,\n 3\n]');
});
test('insert item at an index im empty array', () => {
let content = '[\n]';
let edits = setProperty(content, [1], 1, formatterOptions);
assertEdit(content, edits, '[\n 1\n]');
});
test('insert item at end index', () => {
let content = '[\n 1,\n 2\n]';
let edits = setProperty(content, [2], 3, formatterOptions);
assertEdit(content, edits, '[\n 1,\n 2,\n 3\n]');
});
test('insert item at end to empty array', () => {
let content = '[\n]';
let edits = setProperty(content, [-1], 'bar', formatterOptions);
assertEdit(content, edits, '[\n "bar"\n]');
});
test('insert item', () => {
test('insert item at end', () => {
let content = '[\n 1,\n 2\n]';
let edits = setProperty(content, [-1], 'bar', formatterOptions);
assertEdit(content, edits, '[\n 1,\n 2,\n "bar"\n]');
@@ -160,4 +190,4 @@ suite('JSON - edits', () => {
assertEdit(content, edits, '// This is a comment\n[\n 1,\n "foo"\n]');
});
});
});


@@ -0,0 +1,176 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import { isReadableStream, newWriteableStream, Readable, consumeReadable, consumeReadableWithLimit, consumeStream, ReadableStream, toStream, toReadable, transform, consumeStreamWithLimit } from 'vs/base/common/stream';
suite('Stream', () => {
test('isReadableStream', () => {
assert.ok(!isReadableStream(Object.create(null)));
assert.ok(isReadableStream(newWriteableStream(d => d)));
});
test('WriteableStream', () => {
const stream = newWriteableStream<string>(strings => strings.join());
let error = false;
stream.on('error', e => {
error = true;
});
let end = false;
stream.on('end', () => {
end = true;
});
stream.write('Hello');
const chunks: string[] = [];
stream.on('data', data => {
chunks.push(data);
});
assert.equal(chunks[0], 'Hello');
stream.write('World');
assert.equal(chunks[1], 'World');
assert.equal(error, false);
assert.equal(end, false);
stream.pause();
stream.write('1');
stream.write('2');
stream.write('3');
assert.equal(chunks.length, 2);
stream.resume();
assert.equal(chunks.length, 3);
assert.equal(chunks[2], '1,2,3');
stream.error(new Error());
assert.equal(error, true);
stream.end('Final Bit');
assert.equal(chunks.length, 4);
assert.equal(chunks[3], 'Final Bit');
stream.destroy();
stream.write('Unexpected');
assert.equal(chunks.length, 4);
});
test('consumeReadable', () => {
const readable = arrayToReadable(['1', '2', '3', '4', '5']);
const consumed = consumeReadable(readable, strings => strings.join());
assert.equal(consumed, '1,2,3,4,5');
});
test('consumeReadableWithLimit', () => {
for (let i = 0; i < 5; i++) {
const readable = arrayToReadable(['1', '2', '3', '4', '5']);
const consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), i);
if (typeof consumedOrReadable === 'string') {
assert.fail('Unexpected result');
} else {
const consumed = consumeReadable(consumedOrReadable, strings => strings.join());
assert.equal(consumed, '1,2,3,4,5');
}
}
let readable = arrayToReadable(['1', '2', '3', '4', '5']);
let consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), 5);
assert.equal(consumedOrReadable, '1,2,3,4,5');
readable = arrayToReadable(['1', '2', '3', '4', '5']);
consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), 6);
assert.equal(consumedOrReadable, '1,2,3,4,5');
});
function arrayToReadable<T>(array: T[]): Readable<T> {
return {
read: () => array.shift() || null
};
}
function readableToStream(readable: Readable<string>): ReadableStream<string> {
const stream = newWriteableStream<string>(strings => strings.join());
// Simulate async behavior
setTimeout(() => {
let chunk: string | null = null;
while ((chunk = readable.read()) !== null) {
stream.write(chunk);
}
stream.end();
}, 0);
return stream;
}
test('consumeStream', async () => {
const stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));
const consumed = await consumeStream(stream, strings => strings.join());
assert.equal(consumed, '1,2,3,4,5');
});
test('consumeStreamWithLimit', async () => {
for (let i = 0; i < 5; i++) {
const readable = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));
const consumedOrStream = await consumeStreamWithLimit(readable, strings => strings.join(), i);
if (typeof consumedOrStream === 'string') {
assert.fail('Unexpected result');
} else {
const consumed = await consumeStream(consumedOrStream, strings => strings.join());
assert.equal(consumed, '1,2,3,4,5');
}
}
let stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));
let consumedOrStream = await consumeStreamWithLimit(stream, strings => strings.join(), 5);
assert.equal(consumedOrStream, '1,2,3,4,5');
stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));
consumedOrStream = await consumeStreamWithLimit(stream, strings => strings.join(), 6);
assert.equal(consumedOrStream, '1,2,3,4,5');
});
test('toStream', async () => {
const stream = toStream('1,2,3,4,5', strings => strings.join());
const consumed = await consumeStream(stream, strings => strings.join());
assert.equal(consumed, '1,2,3,4,5');
});
test('toReadable', async () => {
const readable = toReadable('1,2,3,4,5');
const consumed = await consumeReadable(readable, strings => strings.join());
assert.equal(consumed, '1,2,3,4,5');
});
test('transform', async () => {
const source = newWriteableStream<string>(strings => strings.join());
const result = transform(source, { data: string => string + string }, strings => strings.join());
// Simulate async behavior
setTimeout(() => {
source.write('1');
source.write('2');
source.write('3');
source.write('4');
source.end('5');
}, 0);
const consumed = await consumeStream(result, strings => strings.join());
assert.equal(consumed, '11,22,33,44,55');
});
});
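
The file above is a new test suite for the freshly added vs/base/common/stream module. A compact sketch of the writeable-stream contract these tests rely on, under the assumption that the reducer passed to newWriteableStream is what joins chunks buffered while the stream is paused or has no listener:

import { newWriteableStream } from 'vs/base/common/stream';

const stream = newWriteableStream<string>(chunks => chunks.join());

const received: string[] = [];
stream.on('data', chunk => received.push(chunk)); // attaching a listener flows the stream

stream.pause();
stream.write('1');
stream.write('2');
stream.write('3');
stream.resume();    // the three buffered writes arrive as one reduced chunk: '1,2,3'
stream.end('done'); // final chunk is delivered, then 'end' fires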


@@ -458,4 +458,42 @@ suite('Strings', () => {
assert.equal(strings.removeAccents('ñice'), 'nice');
assert.equal(strings.removeAccents('ńice'), 'nice');
});
test('encodeUTF8', function () {
function assertEncodeUTF8(str: string, expected: number[]): void {
const actual = strings.encodeUTF8(str);
const actualArr: number[] = [];
for (let offset = 0; offset < actual.byteLength; offset++) {
actualArr[offset] = actual[offset];
}
assert.deepEqual(actualArr, expected);
}
function assertDecodeUTF8(data: number[], expected: string): void {
const actual = strings.decodeUTF8(new Uint8Array(data));
assert.deepEqual(actual, expected);
}
function assertEncodeDecodeUTF8(str: string, buff: number[]): void {
assertEncodeUTF8(str, buff);
assertDecodeUTF8(buff, str);
}
assertEncodeDecodeUTF8('\u0000', [0]);
assertEncodeDecodeUTF8('!', [33]);
assertEncodeDecodeUTF8('\u007F', [127]);
assertEncodeDecodeUTF8('\u0080', [194, 128]);
assertEncodeDecodeUTF8('Ɲ', [198, 157]);
assertEncodeDecodeUTF8('\u07FF', [223, 191]);
assertEncodeDecodeUTF8('\u0800', [224, 160, 128]);
assertEncodeDecodeUTF8('ஂ', [224, 174, 130]);
assertEncodeDecodeUTF8('\uffff', [239, 191, 191]);
assertEncodeDecodeUTF8('\u10000', [225, 128, 128, 48]);
assertEncodeDecodeUTF8('🧝', [240, 159, 167, 157]);
});
test('getGraphemeBreakType', () => {
assert.equal(strings.getGraphemeBreakType(0xBC1), strings.GraphemeBreakType.SpacingMark);
});
});


@@ -439,6 +439,10 @@ suite('URI', () => {
assert.equal(uri.path, uri2.path);
});
test('Unable to open \'%A0.txt\': URI malformed #76506', function () {
assert.equal(URI.parse('file://some/%.txt'), 'file://some/%25.txt');
assert.equal(URI.parse('file://some/%A0.txt'), 'file://some/%25A0.txt');
});
test('Links in markdown are broken if url contains encoded parameters #79474', function () {
this.skip();


@@ -4,7 +4,7 @@
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import { VSBuffer, bufferToReadable, readableToBuffer, bufferToStream, streamToBuffer, writeableBufferStream } from 'vs/base/common/buffer';
import { VSBuffer, bufferToReadable, readableToBuffer, bufferToStream, streamToBuffer, newWriteableBufferStream } from 'vs/base/common/buffer';
import { timeout } from 'vs/base/common/async';
suite('Buffer', () => {
@@ -30,7 +30,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - basics (no error)', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
let chunks: VSBuffer[] = [];
stream.on('data', data => {
@@ -60,7 +60,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - basics (error)', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
let chunks: VSBuffer[] = [];
stream.on('data', data => {
@@ -89,7 +89,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - buffers data when no listener', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
await timeout(0);
stream.write(VSBuffer.fromString('Hello'));
@@ -118,7 +118,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - buffers errors when no listener', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
await timeout(0);
stream.write(VSBuffer.fromString('Hello'));
@@ -149,7 +149,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - buffers end when no listener', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
await timeout(0);
stream.write(VSBuffer.fromString('Hello'));
@@ -178,7 +178,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - nothing happens after end()', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
let chunks: VSBuffer[] = [];
stream.on('data', data => {
@@ -222,7 +222,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - pause/resume (simple)', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
let chunks: VSBuffer[] = [];
stream.on('data', data => {
@@ -259,7 +259,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - pause/resume (pause after first write)', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
let chunks: VSBuffer[] = [];
stream.on('data', data => {
@@ -299,7 +299,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - pause/resume (error)', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
let chunks: VSBuffer[] = [];
stream.on('data', data => {
@@ -336,7 +336,7 @@ suite('Buffer', () => {
});
test('bufferWriteableStream - destroy', async () => {
const stream = writeableBufferStream();
const stream = newWriteableBufferStream();
let chunks: VSBuffer[] = [];
stream.on('data', data => {


@@ -9,7 +9,7 @@ import * as encoding from 'vs/base/node/encoding';
import { Readable } from 'stream';
import { getPathFromAmdModule } from 'vs/base/common/amd';
export async function detectEncodingByBOM(file: string): Promise<string | null> {
export async function detectEncodingByBOM(file: string): Promise<typeof encoding.UTF16be | typeof encoding.UTF16le | typeof encoding.UTF8_with_bom | null> {
try {
const { buffer, bytesRead } = await readExactlyByFile(file, 3);
@@ -86,7 +86,7 @@ suite('Encoding', () => {
const file = getPathFromAmdModule(require, './fixtures/some_utf8.css');
const detectedEncoding = await detectEncodingByBOM(file);
assert.equal(detectedEncoding, 'utf8');
assert.equal(detectedEncoding, 'utf8bom');
});
test('detectBOM UTF-16 LE', async () => {
@@ -189,6 +189,20 @@ suite('Encoding', () => {
assert.equal(mimes.seemsBinary, false);
});
test('autoGuessEncoding (UTF8)', async function () {
const file = getPathFromAmdModule(require, './fixtures/some_file.css');
const buffer = await readExactlyByFile(file, 512 * 8);
const mimes = await encoding.detectEncodingFromBuffer(buffer, true);
assert.equal(mimes.encoding, 'utf8');
});
test('autoGuessEncoding (ASCII)', async function () {
const file = getPathFromAmdModule(require, './fixtures/some_ansi.css');
const buffer = await readExactlyByFile(file, 512 * 8);
const mimes = await encoding.detectEncodingFromBuffer(buffer, true);
assert.equal(mimes.encoding, null);
});
test('autoGuessEncoding (ShiftJIS)', async function () {
const file = getPathFromAmdModule(require, './fixtures/some.shiftjis.txt');
const buffer = await readExactlyByFile(file, 512 * 8);
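
The encoding hunks above narrow detectEncodingByBOM to the three BOM-detectable encodings and report a BOM-carrying UTF-8 file as 'utf8bom' rather than plain 'utf8'. A hedged sketch of branching on that narrowed result, reusing the test helper defined above and the constants it references (the checkFile wrapper is hypothetical):

import * as encoding from 'vs/base/node/encoding';
import { getPathFromAmdModule } from 'vs/base/common/amd';

async function checkFile(): Promise<void> {
	const file = getPathFromAmdModule(require, './fixtures/some_utf8.css');
	const detected = await detectEncodingByBOM(file); // helper defined in the hunk above

	switch (detected) {
		case encoding.UTF16be:
		case encoding.UTF16le:
			// UTF-16 BOM found: decode with the matching endianness
			break;
		case encoding.UTF8_with_bom:
			// 'utf8bom': strip the BOM before handing the contents on
			break;
		case null:
			// no BOM: fall back to content-based detection (detectEncodingFromBuffer)
			break;
	}
}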


@@ -0,0 +1,42 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
/*----------------------------------------------------------
The base color for this template is #5c87b2. If you'd like
to use a different color start by replacing all instances of
#5c87b2 with your new color.
öäüßßß
----------------------------------------------------------*/
body
{
background-color: #5c87b2;
font-size: .75em;
font-family: Segoe UI, Verdana, Helvetica, Sans-Serif;
margin: 8px;
padding: 0;
color: #696969;
}
h1, h2, h3, h4, h5, h6
{
color: #000;
font-size: 40px;
margin: 0px;
}
textarea
{
font-family: Consolas
}
#results
{
margin-top: 2em;
margin-left: 2em;
color: black;
font-size: medium;
}


@@ -7,6 +7,8 @@
The base color for this template is #5c87b2. If you'd like
to use a different color start by replacing all instances of
#5c87b2 with your new color.
öäüßßß
----------------------------------------------------------*/
body
{
@@ -25,12 +27,12 @@ h1, h2, h3, h4, h5, h6
margin: 0px;
}
textarea
textarea
{
font-family: Consolas
}
#results
#results
{
margin-top: 2em;
margin-left: 2em;


@@ -14,12 +14,12 @@ suite('Glob', () => {
// let patterns = [
// '{**/*.cs,**/*.json,**/*.csproj,**/*.sln}',
// '{**/*.cs,**/*.csproj,**/*.sln}',
// '{**/*.ts,**/*.tsx,**/*.js,**/*.jsx,**/*.es6,**/*.mjs}',
// '{**/*.ts,**/*.tsx,**/*.js,**/*.jsx,**/*.es6,**/*.mjs,**/*.cjs}',
// '**/*.go',
// '{**/*.ps,**/*.ps1}',
// '{**/*.c,**/*.cpp,**/*.h}',
// '{**/*.fsx,**/*.fsi,**/*.fs,**/*.ml,**/*.mli}',
// '{**/*.js,**/*.jsx,**/*.es6,**/*.mjs}',
// '{**/*.js,**/*.jsx,**/*.es6,**/*.mjs,**/*.cjs}',
// '{**/*.ts,**/*.tsx}',
// '{**/*.php}',
// '{**/*.php}',
@@ -1015,4 +1015,4 @@ suite('Glob', () => {
assertNoGlobMatch(p, '/DNXConsoleApp/foo/Program.cs');
}
});
});
});


@@ -12,7 +12,6 @@ import * as uuid from 'vs/base/common/uuid';
import * as pfs from 'vs/base/node/pfs';
import { timeout } from 'vs/base/common/async';
import { getPathFromAmdModule } from 'vs/base/common/amd';
import { CancellationTokenSource } from 'vs/base/common/cancellation';
import { isWindows, isLinux } from 'vs/base/common/platform';
import { canNormalize } from 'vs/base/common/normalization';
import { VSBuffer } from 'vs/base/common/buffer';
@@ -50,7 +49,13 @@ function toReadable(value: string, throwError?: boolean): Readable {
});
}
suite('PFS', () => {
suite('PFS', function () {
// Given issues such as https://github.com/microsoft/vscode/issues/84066
// we see random test failures when accessing the native file system. To
// diagnose further, we retry node.js file access tests up to 3 times to
// rule out any random disk issue.
this.retries(3);
test('writeFile', async () => {
const id = uuid.generateUuid();
@@ -253,7 +258,7 @@ suite('PFS', () => {
}
catch (error) {
assert.fail(error);
return Promise.reject(error);
throw error;
}
});
@@ -306,23 +311,6 @@ suite('PFS', () => {
return pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
});
test('mkdirp cancellation', async () => {
const id = uuid.generateUuid();
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
const newDir = path.join(parentDir, 'pfs', id);
const source = new CancellationTokenSource();
const mkdirpPromise = pfs.mkdirp(newDir, 493, source.token);
source.cancel();
await mkdirpPromise;
assert.ok(!fs.existsSync(newDir));
return pfs.rimraf(parentDir, pfs.RimRafMode.MOVE);
});
test('readDirsInDir', async () => {
const id = uuid.generateUuid();
const parentDir = path.join(os.tmpdir(), 'vsctests', id);
@@ -525,7 +513,7 @@ suite('PFS', () => {
}
if (!expectedError || (<any>expectedError).code !== 'EISDIR') {
return Promise.reject(new Error('Expected EISDIR error for writing to folder but got: ' + (expectedError ? (<any>expectedError).code : 'no error')));
throw new Error('Expected EISDIR error for writing to folder but got: ' + (expectedError ? (<any>expectedError).code : 'no error'));
}
// verify that the stream is still consumable (for https://github.com/Microsoft/vscode/issues/42542)
@@ -551,7 +539,7 @@ suite('PFS', () => {
}
if (!expectedError || expectedError.message !== readError) {
return Promise.reject(new Error('Expected error for writing to folder'));
throw new Error('Expected error for writing to folder');
}
await pfs.rimraf(parentDir);
@@ -582,7 +570,7 @@ suite('PFS', () => {
}
if (!expectedError || !((<any>expectedError).code !== 'EACCES' || (<any>expectedError).code !== 'EPERM')) {
return Promise.reject(new Error('Expected EACCES/EPERM error for writing to folder but got: ' + (expectedError ? (<any>expectedError).code : 'no error')));
throw new Error('Expected EACCES/EPERM error for writing to folder but got: ' + (expectedError ? (<any>expectedError).code : 'no error'));
}
await pfs.rimraf(parentDir);
@@ -609,7 +597,7 @@ suite('PFS', () => {
}
if (!expectedError) {
return Promise.reject(new Error('Expected error for writing to folder'));
throw new Error('Expected error for writing to folder');
}
await pfs.rimraf(parentDir);

Binary file not shown.


@@ -0,0 +1,28 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as assert from 'assert';
import * as path from 'vs/base/common/path';
import * as os from 'os';
import { extract } from 'vs/base/node/zip';
import { generateUuid } from 'vs/base/common/uuid';
import { rimraf, exists } from 'vs/base/node/pfs';
import { getPathFromAmdModule } from 'vs/base/common/amd';
import { createCancelablePromise } from 'vs/base/common/async';
const fixtures = getPathFromAmdModule(require, './fixtures');
suite('Zip', () => {
test('extract should handle directories', () => {
const fixture = path.join(fixtures, 'extract.zip');
const target = path.join(os.tmpdir(), generateUuid());
return createCancelablePromise(token => extract(fixture, target, {}, token)
.then(() => exists(path.join(target, 'extension')))
.then(exists => assert(exists))
.then(() => rimraf(target)));
});
});