Merge from vscode 4d91d96e5e121b38d33508cdef17868bab255eae

commit 5e7071e466 (parent a971aee5bd), committed by AzureDataStudio
@@ -342,5 +342,14 @@ suite('Arrays', () => {
        arrays.coalesceInPlace(sparse);
        assert.equal(sparse.length, 5);
    });

    test('insert, remove', function () {
        const array: string[] = [];
        const remove = arrays.insert(array, 'foo');
        assert.equal(array[0], 'foo');

        remove();
        assert.equal(array.length, 0);
    });
});
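The new `insert, remove` test pins down an insert helper that hands back an undo function. A minimal sketch of that pattern, not the actual vs/base/common/arrays implementation (names chosen for illustration):

    // Hypothetical re-implementation of the behaviour the test asserts:
    // push the element and return a closure that removes it again.
    function insert<T>(array: T[], element: T): () => void {
        array.push(element);

        return () => {
            const index = array.indexOf(element);
            if (index >= 0) {
                array.splice(index, 1);
            }
        };
    }

    // Usage mirroring the test above:
    const tags: string[] = [];
    const removeFoo = insert(tags, 'foo'); // tags -> ['foo']
    removeFoo();                           // tags -> []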
@@ -48,6 +48,21 @@ suite('Lifecycle', () => {
        assert(disposable.isDisposed);
        assert(disposable2.isDisposed);
    });

    test('Action bar has broken accessibility #100273', function () {
        let array = [{ dispose() { } }, { dispose() { } }];
        let array2 = dispose(array);

        assert.equal(array.length, 2);
        assert.equal(array2.length, 0);
        assert.ok(array !== array2);

        let set = new Set<IDisposable>([{ dispose() { } }, { dispose() { } }]);
        let setValues = set.values();
        let setValues2 = dispose(setValues);
        assert.ok(setValues === setValues2);

    });
});

suite('Reference Collection', () => {
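The assertions above fix the contract of dispose() for collections: an array argument is left untouched and a fresh empty array comes back, while any other iterable (here a Set iterator) is returned as-is after its elements are disposed. A hedged sketch of that contract, not the actual vs/base/common/lifecycle code (disposeAll is an illustrative name):

    interface IDisposable {
        dispose(): void;
    }

    // Dispose every element, then return [] for arrays and the input itself
    // for any other iterable, matching the behaviour the test asserts.
    function disposeAll<T extends IDisposable>(disposables: Iterable<T>): Iterable<T> {
        for (const disposable of disposables) {
            disposable.dispose();
        }

        return Array.isArray(disposables) ? [] : disposables;
    }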
@@ -6,6 +6,7 @@
import { ResourceMap, TernarySearchTree, PathIterator, StringIterator, LinkedMap, Touch, LRUCache, UriIterator } from 'vs/base/common/map';
import * as assert from 'assert';
import { URI } from 'vs/base/common/uri';
import { extUriIgnorePathCase } from 'vs/base/common/resources';

suite('Map', () => {

@@ -811,32 +812,32 @@ suite('Map', () => {
        assert.equal(map.get(uncFile), 'true');
    });

    // test('ResourceMap - files (ignorecase)', function () {
    // const map = new ResourceMap<any>(true);
    test('ResourceMap - files (ignorecase)', function () {
        const map = new ResourceMap<any>(uri => extUriIgnorePathCase.getComparisonKey(uri));

    // const fileA = URI.parse('file://some/filea');
    // const fileB = URI.parse('some://some/other/fileb');
    // const fileAUpper = URI.parse('file://SOME/FILEA');
        const fileA = URI.parse('file://some/filea');
        const fileB = URI.parse('some://some/other/fileb');
        const fileAUpper = URI.parse('file://SOME/FILEA');

    // map.set(fileA, 'true');
    // assert.equal(map.get(fileA), 'true');
        map.set(fileA, 'true');
        assert.equal(map.get(fileA), 'true');

    // assert.equal(map.get(fileAUpper), 'true');
        assert.equal(map.get(fileAUpper), 'true');

    // assert.ok(!map.get(fileB));
        assert.ok(!map.get(fileB));

    // map.set(fileAUpper, 'false');
    // assert.equal(map.get(fileAUpper), 'false');
        map.set(fileAUpper, 'false');
        assert.equal(map.get(fileAUpper), 'false');

    // assert.equal(map.get(fileA), 'false');
        assert.equal(map.get(fileA), 'false');

    // const windowsFile = URI.file('c:\\test with %25\\c#code');
    // const uncFile = URI.file('\\\\shäres\\path\\c#\\plugin.json');
        const windowsFile = URI.file('c:\\test with %25\\c#code');
        const uncFile = URI.file('\\\\shäres\\path\\c#\\plugin.json');

    // map.set(windowsFile, 'true');
    // map.set(uncFile, 'true');
        map.set(windowsFile, 'true');
        map.set(uncFile, 'true');

    // assert.equal(map.get(windowsFile), 'true');
    // assert.equal(map.get(uncFile), 'true');
    // });
        assert.equal(map.get(windowsFile), 'true');
        assert.equal(map.get(uncFile), 'true');
    });
});
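The rewritten ignore-case test shows the new ResourceMap constructor taking a key function (here extUriIgnorePathCase.getComparisonKey) instead of a boolean flag. A rough sketch of the idea with illustrative names, not the real ResourceMap internals:

    // A map that canonicalises keys through a caller-supplied function, so
    // 'file://SOME/FILEA' and 'file://some/filea' collapse to one entry.
    class KeyedMap<K, V> {
        private readonly entries = new Map<string, V>();

        constructor(private readonly toKey: (key: K) => string) { }

        set(key: K, value: V): void {
            this.entries.set(this.toKey(key), value);
        }

        get(key: K): V | undefined {
            return this.entries.get(this.toKey(key));
        }
    }

    // Usage with a case-insensitive key function, mirroring the test:
    const map = new KeyedMap<string, string>(key => key.toLowerCase());
    map.set('file://SOME/FILEA', 'true');
    map.get('file://some/filea'); // -> 'true'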
@@ -4,7 +4,8 @@
 *--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import { isReadableStream, newWriteableStream, Readable, consumeReadable, consumeReadableWithLimit, consumeStream, ReadableStream, toStream, toReadable, transform, consumeStreamWithLimit } from 'vs/base/common/stream';
import { isReadableStream, newWriteableStream, Readable, consumeReadable, peekReadable, consumeStream, ReadableStream, toStream, toReadable, transform, peekStream, isReadableBufferedStream } from 'vs/base/common/stream';
import { timeout } from 'vs/base/common/async';

suite('Stream', () => {

@@ -13,7 +14,16 @@ suite('Stream', () => {
        assert.ok(isReadableStream(newWriteableStream(d => d)));
    });

    test('WriteableStream', () => {
    test('isReadableBufferedStream', async () => {
        assert.ok(!isReadableBufferedStream(Object.create(null)));

        const stream = newWriteableStream(d => d);
        stream.end();
        const bufferedStream = await peekStream(stream, 1);
        assert.ok(isReadableBufferedStream(bufferedStream));
    });

    test('WriteableStream - basics', () => {
        const stream = newWriteableStream<string>(strings => strings.join());

        let error = false;
@@ -66,17 +76,92 @@ suite('Stream', () => {
        assert.equal(chunks.length, 4);
    });

    test('WriteableStream - removeListener', () => {
        const stream = newWriteableStream<string>(strings => strings.join());

        let error = false;
        const errorListener = (e: Error) => {
            error = true;
        };
        stream.on('error', errorListener);

        let data = false;
        const dataListener = () => {
            data = true;
        };
        stream.on('data', dataListener);

        stream.write('Hello');
        assert.equal(data, true);

        data = false;
        stream.removeListener('data', dataListener);

        stream.write('World');
        assert.equal(data, false);

        stream.error(new Error());
        assert.equal(error, true);

        error = false;
        stream.removeListener('error', errorListener);

        stream.error(new Error());
        assert.equal(error, false);
    });

    test('WriteableStream - highWaterMark', async () => {
        const stream = newWriteableStream<string>(strings => strings.join(), { highWaterMark: 3 });

        let res = stream.write('1');
        assert.ok(!res);

        res = stream.write('2');
        assert.ok(!res);

        res = stream.write('3');
        assert.ok(!res);

        let promise1 = stream.write('4');
        assert.ok(promise1 instanceof Promise);

        let promise2 = stream.write('5');
        assert.ok(promise2 instanceof Promise);

        let drained1 = false;
        (async () => {
            await promise1;
            drained1 = true;
        })();

        let drained2 = false;
        (async () => {
            await promise2;
            drained2 = true;
        })();

        let data: string | undefined = undefined;
        stream.on('data', chunk => {
            data = chunk;
        });
        assert.ok(data);

        await timeout(0);
        assert.equal(drained1, true);
        assert.equal(drained2, true);
    });

    test('consumeReadable', () => {
        const readable = arrayToReadable(['1', '2', '3', '4', '5']);
        const consumed = consumeReadable(readable, strings => strings.join());
        assert.equal(consumed, '1,2,3,4,5');
    });

    test('consumeReadableWithLimit', () => {
    test('peekReadable', () => {
        for (let i = 0; i < 5; i++) {
            const readable = arrayToReadable(['1', '2', '3', '4', '5']);

            const consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), i);
            const consumedOrReadable = peekReadable(readable, strings => strings.join(), i);
            if (typeof consumedOrReadable === 'string') {
                assert.fail('Unexpected result');
            } else {
@@ -86,14 +171,75 @@ suite('Stream', () => {
        }

        let readable = arrayToReadable(['1', '2', '3', '4', '5']);
        let consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), 5);
        let consumedOrReadable = peekReadable(readable, strings => strings.join(), 5);
        assert.equal(consumedOrReadable, '1,2,3,4,5');

        readable = arrayToReadable(['1', '2', '3', '4', '5']);
        consumedOrReadable = consumeReadableWithLimit(readable, strings => strings.join(), 6);
        consumedOrReadable = peekReadable(readable, strings => strings.join(), 6);
        assert.equal(consumedOrReadable, '1,2,3,4,5');
    });

    test('peekReadable - error handling', async () => {

        // 0 Chunks
        let stream = newWriteableStream(data => data);

        let error: Error | undefined = undefined;
        let promise = (async () => {
            try {
                await peekStream(stream, 1);
            } catch (err) {
                error = err;
            }
        })();

        stream.error(new Error());
        await promise;

        assert.ok(error);

        // 1 Chunk
        stream = newWriteableStream(data => data);

        error = undefined;
        promise = (async () => {
            try {
                await peekStream(stream, 1);
            } catch (err) {
                error = err;
            }
        })();

        stream.write('foo');
        stream.error(new Error());
        await promise;

        assert.ok(error);

        // 2 Chunks
        stream = newWriteableStream(data => data);

        error = undefined;
        promise = (async () => {
            try {
                await peekStream(stream, 1);
            } catch (err) {
                error = err;
            }
        })();

        stream.write('foo');
        stream.write('bar');
        stream.error(new Error());
        await promise;

        assert.ok(!error);

        stream.on('error', err => error = err);
        stream.on('data', chunk => { });
        assert.ok(error);
    });

    function arrayToReadable<T>(array: T[]): Readable<T> {
        return {
            read: () => array.shift() || null
@@ -122,26 +268,39 @@ suite('Stream', () => {
        assert.equal(consumed, '1,2,3,4,5');
    });

    test('consumeStreamWithLimit', async () => {
    test('peekStream', async () => {
        for (let i = 0; i < 5; i++) {
            const readable = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));
            const stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));

            const consumedOrStream = await consumeStreamWithLimit(readable, strings => strings.join(), i);
            if (typeof consumedOrStream === 'string') {
                assert.fail('Unexpected result');
            const result = await peekStream(stream, i);
            assert.equal(stream, result.stream);
            if (result.ended) {
                assert.fail('Unexpected result, stream should not have ended yet');
            } else {
                const consumed = await consumeStream(consumedOrStream, strings => strings.join());
                assert.equal(consumed, '1,2,3,4,5');
                assert.equal(result.buffer.length, i + 1, `maxChunks: ${i}`);

                const additionalResult: string[] = [];
                await consumeStream(stream, strings => {
                    additionalResult.push(...strings);

                    return strings.join();
                });

                assert.equal([...result.buffer, ...additionalResult].join(), '1,2,3,4,5');
            }
        }

        let stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));
        let consumedOrStream = await consumeStreamWithLimit(stream, strings => strings.join(), 5);
        assert.equal(consumedOrStream, '1,2,3,4,5');
        let result = await peekStream(stream, 5);
        assert.equal(stream, result.stream);
        assert.equal(result.buffer.join(), '1,2,3,4,5');
        assert.equal(result.ended, true);

        stream = readableToStream(arrayToReadable(['1', '2', '3', '4', '5']));
        consumedOrStream = await consumeStreamWithLimit(stream, strings => strings.join(), 6);
        assert.equal(consumedOrStream, '1,2,3,4,5');
        result = await peekStream(stream, 6);
        assert.equal(stream, result.stream);
        assert.equal(result.buffer.join(), '1,2,3,4,5');
        assert.equal(result.ended, true);
    });

    test('toStream', async () => {
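The reworked peekStream test documents the new result shape: peekStream(stream, maxChunks) resolves to { stream, buffer, ended }, the peeked chunks stay in buffer and are not replayed by the returned stream, so a consumer has to stitch the two together. A hedged usage sketch based only on the assertions above (the import path matches the diff; previewThenConsume is an illustrative name):

    import { ReadableStream, peekStream, consumeStream } from 'vs/base/common/stream';

    // Peek at the first chunk without losing it, then read everything.
    async function previewThenConsume(stream: ReadableStream<string>): Promise<string> {
        const peeked = await peekStream(stream, 1);

        if (peeked.ended) {
            // Short streams end up fully buffered; nothing is left to consume.
            return peeked.buffer.join('');
        }

        // Remaining chunks flow through peeked.stream; the buffered ones do not repeat.
        const rest = await consumeStream(peeked.stream, chunks => chunks.join(''));

        return peeked.buffer.join('') + rest;
    }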
@@ -4,8 +4,9 @@
 *--------------------------------------------------------------------------------------------*/

import * as assert from 'assert';
import { VSBuffer, bufferToReadable, readableToBuffer, bufferToStream, streamToBuffer, newWriteableBufferStream } from 'vs/base/common/buffer';
import { VSBuffer, bufferToReadable, readableToBuffer, bufferToStream, streamToBuffer, newWriteableBufferStream, bufferedStreamToBuffer } from 'vs/base/common/buffer';
import { timeout } from 'vs/base/common/async';
import { peekStream } from 'vs/base/common/stream';

suite('Buffer', () => {

@@ -29,6 +30,13 @@ suite('Buffer', () => {
        assert.equal((await streamToBuffer(stream)).toString(), content);
    });

    test('bufferedStreamToBuffer', async () => {
        const content = 'Hello World';
        const stream = await peekStream(bufferToStream(VSBuffer.fromString(content)), 1);

        assert.equal((await bufferedStreamToBuffer(stream)).toString(), content);
    });

    test('bufferWriteableStream - basics (no error)', async () => {
        const stream = newWriteableBufferStream();
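The buffer suite's imports show the conversion helpers this change extends: VSBuffer values round-trip through readables and streams, and the new bufferedStreamToBuffer covers the buffered-stream variant produced by peekStream. A small round-trip sketch using only the helpers imported above (toString comparisons as in the tests):

    import { VSBuffer, bufferToStream, streamToBuffer, bufferToReadable, readableToBuffer } from 'vs/base/common/buffer';

    async function roundTrip(): Promise<void> {
        const original = VSBuffer.fromString('Hello World');

        // Buffer -> readable -> buffer (synchronous pull-based path).
        const viaReadable = readableToBuffer(bufferToReadable(original));

        // Buffer -> stream -> buffer (asynchronous push-based path).
        const viaStream = await streamToBuffer(bufferToStream(original));

        console.log(viaReadable.toString() === original.toString()); // true
        console.log(viaStream.toString() === original.toString());   // true
    }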
@@ -7,9 +7,10 @@ import * as assert from 'assert';
import * as fs from 'fs';
import * as encoding from 'vs/base/node/encoding';
import * as terminalEncoding from 'vs/base/node/terminalEncoding';
import { Readable } from 'stream';
import * as streams from 'vs/base/common/stream';
import * as iconv from 'iconv-lite';
import { getPathFromAmdModule } from 'vs/base/common/amd';
import { newWriteableBufferStream, VSBuffer, VSBufferReadableStream, streamToBufferReadableStream } from 'vs/base/common/buffer';

export async function detectEncodingByBOM(file: string): Promise<typeof encoding.UTF16be | typeof encoding.UTF16le | typeof encoding.UTF8_with_bom | null> {
    try {

@@ -22,7 +23,7 @@ export async function detectEncodingByBOM(file: string): Promise<typeof encoding
}

interface ReadResult {
    buffer: Buffer | null;
    buffer: VSBuffer | null;
    bytesRead: number;
}

@@ -43,7 +44,7 @@ function readExactlyByFile(file: string, totalBytes: number): Promise<ReadResult
            return reject(err); // we want to bubble this error up (file is actually a folder)
        }

        return resolve({ buffer: resultBuffer, bytesRead });
        return resolve({ buffer: resultBuffer ? VSBuffer.wrap(resultBuffer) : null, bytesRead });
    });
}

@@ -128,7 +129,7 @@ suite('Encoding', () => {
        process.env['VSCODE_CLI_ENCODING'] = 'utf16le';

        const enc = await terminalEncoding.resolveTerminalEncoding();
        assert.ok(encoding.encodingExists(enc));
        assert.ok(await encoding.encodingExists(enc));
        assert.equal(enc, 'utf16le');
    });
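The ReadResult change above swaps Node's Buffer for VSBuffer, wrapping the raw buffer that fs delivers. A minimal sketch of that wrapping step, assuming only the VSBuffer API already used in the diff (wrapResult is an illustrative name):

    import { VSBuffer } from 'vs/base/common/buffer';

    // Wrap a Node.js Buffer as a VSBuffer, guarding against the null case the
    // test helper has to handle when nothing was read.
    function wrapResult(resultBuffer: Buffer | null, bytesRead: number): { buffer: VSBuffer | null; bytesRead: number } {
        return {
            buffer: resultBuffer ? VSBuffer.wrap(resultBuffer) : null,
            bytesRead
        };
    }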
@@ -231,32 +232,33 @@ suite('Encoding', () => {
        });
    }

    async function readAllAsString(stream: NodeJS.ReadableStream) {
        return new Promise<string>((resolve, reject) => {
            let all = '';
            stream.on('data', chunk => {
                all += chunk;
                assert.equal(typeof chunk, 'string');
    function newTestReadableStream(buffers: Buffer[]): VSBufferReadableStream {
        const stream = newWriteableBufferStream();
        buffers
            .map(VSBuffer.wrap)
            .forEach(buffer => {
                setTimeout(() => {
                    stream.write(buffer);
                });
            });
            stream.on('end', () => {
                resolve(all);
            });
            stream.on('error', reject);
        setTimeout(() => {
            stream.end();
        });
        return stream;
    }

    async function readAllAsString(stream: streams.ReadableStream<string>) {
        return streams.consumeStream(stream, strings => strings.join(''));
    }

    test('toDecodeStream - some stream', async function () {
        const source = newTestReadableStream([
            Buffer.from([65, 66, 67]),
            Buffer.from([65, 66, 67]),
            Buffer.from([65, 66, 67]),
        ]);

        let source = new Readable({
            read(size) {
                this.push(Buffer.from([65, 66, 67]));
                this.push(Buffer.from([65, 66, 67]));
                this.push(Buffer.from([65, 66, 67]));
                this.push(null);
            }
        });

        let { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 4, guessEncoding: false, overwriteEncoding: detected => detected || encoding.UTF8 });
        const { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 4, guessEncoding: false, overwriteEncoding: async detected => detected || encoding.UTF8 });

        assert.ok(detected);
        assert.ok(stream);
@@ -266,17 +268,13 @@ suite('Encoding', () => {
    });

    test('toDecodeStream - some stream, expect too much data', async function () {
        const source = newTestReadableStream([
            Buffer.from([65, 66, 67]),
            Buffer.from([65, 66, 67]),
            Buffer.from([65, 66, 67]),
        ]);

        let source = new Readable({
            read(size) {
                this.push(Buffer.from([65, 66, 67]));
                this.push(Buffer.from([65, 66, 67]));
                this.push(Buffer.from([65, 66, 67]));
                this.push(null);
            }
        });

        let { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 64, guessEncoding: false, overwriteEncoding: detected => detected || encoding.UTF8 });
        const { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 64, guessEncoding: false, overwriteEncoding: async detected => detected || encoding.UTF8 });

        assert.ok(detected);
        assert.ok(stream);

@@ -286,14 +284,10 @@ suite('Encoding', () => {
    });

    test('toDecodeStream - some stream, no data', async function () {
        const source = newWriteableBufferStream();
        source.end();

        let source = new Readable({
            read(size) {
                this.push(null); // empty
            }
        });

        let { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 512, guessEncoding: false, overwriteEncoding: detected => detected || encoding.UTF8 });
        const { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 512, guessEncoding: false, overwriteEncoding: async detected => detected || encoding.UTF8 });

        assert.ok(detected);
        assert.ok(stream);
@@ -304,29 +298,105 @@ suite('Encoding', () => {

    test('toDecodeStream - encoding, utf16be', async function () {
        const path = getPathFromAmdModule(require, './fixtures/some_utf16be.css');
        const source = streamToBufferReadableStream(fs.createReadStream(path));

        let path = getPathFromAmdModule(require, './fixtures/some_utf16be.css');
        let source = fs.createReadStream(path);

        let { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 64, guessEncoding: false, overwriteEncoding: detected => detected || encoding.UTF8 });
        const { detected, stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 64, guessEncoding: false, overwriteEncoding: async detected => detected || encoding.UTF8 });

        assert.equal(detected.encoding, 'utf16be');
        assert.equal(detected.seemsBinary, false);

        let expected = await readAndDecodeFromDisk(path, detected.encoding);
        let actual = await readAllAsString(stream);
        const expected = await readAndDecodeFromDisk(path, detected.encoding);
        const actual = await readAllAsString(stream);
        assert.equal(actual, expected);
    });

    test('toDecodeStream - empty file', async function () {
        const path = getPathFromAmdModule(require, './fixtures/empty.txt');
        const source = streamToBufferReadableStream(fs.createReadStream(path));
        const { detected, stream } = await encoding.toDecodeStream(source, { guessEncoding: false, overwriteEncoding: async detected => detected || encoding.UTF8 });

        let path = getPathFromAmdModule(require, './fixtures/empty.txt');
        let source = fs.createReadStream(path);
        let { detected, stream } = await encoding.toDecodeStream(source, { guessEncoding: false, overwriteEncoding: detected => detected || encoding.UTF8 });

        let expected = await readAndDecodeFromDisk(path, detected.encoding);
        let actual = await readAllAsString(stream);
        const expected = await readAndDecodeFromDisk(path, detected.encoding);
        const actual = await readAllAsString(stream);
        assert.equal(actual, expected);
    });

    test('toDecodeStream - decodes buffer entirely', async function () {
        const emojis = Buffer.from('🖥️💻💾');
        const incompleteEmojis = emojis.slice(0, emojis.length - 1);

        const buffers: Buffer[] = [];
        for (let i = 0; i < incompleteEmojis.length; i++) {
            buffers.push(incompleteEmojis.slice(i, i + 1));
        }

        const source = newTestReadableStream(buffers);
        const { stream } = await encoding.toDecodeStream(source, { minBytesRequiredForDetection: 4, guessEncoding: false, overwriteEncoding: async detected => detected || encoding.UTF8 });

        const expected = incompleteEmojis.toString(encoding.UTF8);
        const actual = await readAllAsString(stream);

        assert.equal(actual, expected);
    });

    test('toEncodeReadable - encoding, utf16be', async function () {
        const path = getPathFromAmdModule(require, './fixtures/some_utf16be.css');
        const source = await readAndDecodeFromDisk(path, encoding.UTF16be);

        const expected = VSBuffer.wrap(
            iconv.encode(source, encoding.toNodeEncoding(encoding.UTF16be))
        ).toString();

        const actual = streams.consumeReadable(
            await encoding.toEncodeReadable(streams.toReadable(source), encoding.UTF16be),
            VSBuffer.concat
        ).toString();

        assert.equal(actual, expected);
    });

    test('toEncodeReadable - empty readable to utf8', async function () {
        const source: streams.Readable<string> = {
            read() {
                return null;
            }
        };

        const actual = streams.consumeReadable(
            await encoding.toEncodeReadable(source, encoding.UTF8),
            VSBuffer.concat
        ).toString();

        assert.equal(actual, '');
    });

    [{
        utfEncoding: encoding.UTF8,
        relatedBom: encoding.UTF8_BOM
    }, {
        utfEncoding: encoding.UTF8_with_bom,
        relatedBom: encoding.UTF8_BOM
    }, {
        utfEncoding: encoding.UTF16be,
        relatedBom: encoding.UTF16be_BOM,
    }, {
        utfEncoding: encoding.UTF16le,
        relatedBom: encoding.UTF16le_BOM
    }].forEach(({ utfEncoding, relatedBom }) => {
        test(`toEncodeReadable - empty readable to ${utfEncoding} with BOM`, async function () {
            const source: streams.Readable<string> = {
                read() {
                    return null;
                }
            };

            const encodedReadable = encoding.toEncodeReadable(source, utfEncoding, { addBOM: true });

            const expected = VSBuffer.wrap(Buffer.from(relatedBom)).toString();
            const actual = streams.consumeReadable(await encodedReadable, VSBuffer.concat).toString();

            assert.equal(actual, expected);
        });
    });
});
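Two API details stand out in the new encoding tests: toDecodeStream now takes a VSBufferReadableStream (hence streamToBufferReadableStream around fs.createReadStream) and its overwriteEncoding callback is awaited, while toEncodeReadable can prepend a BOM via { addBOM: true }. A hedged sketch of decoding a file with these options, composed only from calls that appear in the diff (decodeFile is an illustrative name):

    import * as fs from 'fs';
    import * as encoding from 'vs/base/node/encoding';
    import * as streams from 'vs/base/common/stream';
    import { streamToBufferReadableStream } from 'vs/base/common/buffer';

    // Decode a file to a string, falling back to UTF-8 when detection finds nothing.
    async function decodeFile(path: string): Promise<string> {
        const source = streamToBufferReadableStream(fs.createReadStream(path));

        const { detected, stream } = await encoding.toDecodeStream(source, {
            guessEncoding: false,
            overwriteEncoding: async detected => detected || encoding.UTF8 // callback may be async now
        });

        console.log(detected.encoding, detected.seemsBinary);

        return streams.consumeStream(stream, strings => strings.join(''));
    }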
@@ -164,8 +164,7 @@ suite('Paths (Node Implementation)', () => {
                os = 'posix';
            }
            const message =
                `path.${os}.join(${test[0].map(JSON.stringify).join(',')})\n expect=${
                JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
                `path.${os}.join(${test[0].map(JSON.stringify).join(',')})\n expect=${JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
            if (actual !== expected && actualAlt !== expected) {
                failures.push(`\n${message}`);
            }

@@ -319,8 +318,7 @@
                os = 'posix';
            }
            const actual = extname(input);
            const message = `path.${os}.extname(${JSON.stringify(input)})\n expect=${
                JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
            const message = `path.${os}.extname(${JSON.stringify(input)})\n expect=${JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
            if (actual !== expected) {
                failures.push(`\n${message}`);
            }

@@ -328,8 +326,7 @@
            {
                const input = `C:${test[0].replace(slashRE, '\\')}`;
                const actual = path.win32.extname(input);
                const message = `path.win32.extname(${JSON.stringify(input)})\n expect=${
                    JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
                const message = `path.win32.extname(${JSON.stringify(input)})\n expect=${JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
                if (actual !== expected) {
                    failures.push(`\n${message}`);
                }

@@ -416,8 +413,7 @@

            const expected = test[1];
            const message =
                `path.${os}.resolve(${test[0].map(JSON.stringify).join(',')})\n expect=${
                JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
                `path.${os}.resolve(${test[0].map(JSON.stringify).join(',')})\n expect=${JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
            if (actual !== expected && actualAlt !== expected) {
                failures.push(`\n${message}`);
            }

@@ -585,9 +581,7 @@
            const actual = relative(test[0], test[1]);
            const expected = test[2];
            const os = relative === path.win32.relative ? 'win32' : 'posix';
            const message = `path.${os}.relative(${
                test.slice(0, 2).map(JSON.stringify).join(',')})\n expect=${
                JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
            const message = `path.${os}.relative(${test.slice(0, 2).map(JSON.stringify).join(',')})\n expect=${JSON.stringify(expected)}\n actual=${JSON.stringify(actual)}`;
            if (actual !== expected) {
                failures.push(`\n${message}`);
            }