Merge VS Code 1.23.1 (#1520)

Matt Irvine authored 2018-06-05 11:24:51 -07:00, committed by GitHub
parent e3baf5c443
commit 0c58f09e59
3651 changed files with 74249 additions and 48599 deletions

View File

@@ -9,9 +9,10 @@ import * as fs from 'fs';
import { dirname, basename } from 'path';
import * as objects from 'vs/base/common/objects';
import { IDisposable, dispose, toDisposable } from 'vs/base/common/lifecycle';
import Event, { Emitter } from 'vs/base/common/event';
import { Event, Emitter } from 'vs/base/common/event';
import * as json from 'vs/base/common/json';
import * as extfs from 'vs/base/node/extfs';
import { isWindows } from 'vs/base/common/platform';
export interface IConfigurationChangeEvent<T> {
config: T;
@@ -49,7 +50,7 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
private loaded: boolean;
private timeoutHandle: NodeJS.Timer;
private disposables: IDisposable[];
private _onDidUpdateConfiguration: Emitter<IConfigurationChangeEvent<T>>;
private readonly _onDidUpdateConfiguration: Emitter<IConfigurationChangeEvent<T>>;
private configName: string;
constructor(private _path: string, private options: IConfigOptions<T> = { changeBufferDelay: 0, defaultConfig: Object.create(null), onError: error => console.error(error) }) {
@@ -165,8 +166,18 @@ export class ConfigWatcher<T> implements IConfigWatcher<T>, IDisposable {
}
private onConfigFileChange(eventType: string, filename: string, isParentFolder: boolean): void {
if (isParentFolder && filename !== this.configName) {
return; // a change to a sibling file that is not our config file
if (isParentFolder) {
// Windows: in some cases the filename contains artifacts from the absolute path
// see https://github.com/nodejs/node/issues/19170
// As such, we have to ensure that the filename basename is used for comparison.
if (isWindows && filename !== this.configName) {
filename = basename(filename);
}
if (filename !== this.configName) {
return; // a change to a sibling file that is not our config file
}
}
if (this.timeoutHandle) {
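
For illustration, a minimal sketch of the parent-folder watching pattern the change above guards, using a hypothetical watchConfigFile helper that is not part of this commit: on Windows, fs.watch can report the changed filename with absolute-path artifacts, so only the basename is compared against the config file name.

import * as fs from 'fs';
import { basename, dirname } from 'path';

// Hypothetical helper, for illustration only: watch the parent folder and
// ignore events for sibling files that are not the config file.
function watchConfigFile(configPath: string, onChange: () => void): fs.FSWatcher {
	const configName = basename(configPath);

	return fs.watch(dirname(configPath), (eventType, filename) => {
		// Windows may report the filename with path artifacts
		// (https://github.com/nodejs/node/issues/19170), so normalize it first.
		const changed = filename ? basename(filename.toString()) : configName;
		if (changed === configName) {
			onChange();
		}
	});
}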

View File

@@ -5,7 +5,7 @@
'use strict';
import sd = require('string_decoder');
import * as sd from 'string_decoder';
import { CharCode } from 'vs/base/common/charCode';
/**

View File

@@ -5,17 +5,106 @@
'use strict';
import stream = require('vs/base/node/stream');
import iconv = require('iconv-lite');
import * as stream from 'vs/base/node/stream';
import * as iconv from 'iconv-lite';
import { TPromise } from 'vs/base/common/winjs.base';
import { isLinux, isMacintosh } from 'vs/base/common/platform';
import { exec } from 'child_process';
import { Readable, Writable, WritableOptions } from 'stream';
export const UTF8 = 'utf8';
export const UTF8_with_bom = 'utf8bom';
export const UTF16be = 'utf16be';
export const UTF16le = 'utf16le';
export interface IDecodeStreamOptions {
guessEncoding?: boolean;
minBytesRequiredForDetection?: number;
overwriteEncoding?(detectedEncoding: string): string;
}
export function toDecodeStream(readable: Readable, options: IDecodeStreamOptions): TPromise<{ detected: IDetectedEncodingResult, stream: NodeJS.ReadableStream }> {
if (!options.minBytesRequiredForDetection) {
options.minBytesRequiredForDetection = options.guessEncoding ? AUTO_GUESS_BUFFER_MAX_LEN : NO_GUESS_BUFFER_MAX_LEN;
}
if (!options.overwriteEncoding) {
options.overwriteEncoding = detected => detected || UTF8;
}
return new TPromise<{ detected: IDetectedEncodingResult, stream: NodeJS.ReadableStream }>((resolve, reject) => {
readable.pipe(new class extends Writable {
private _decodeStream: NodeJS.ReadWriteStream;
private _decodeStreamConstruction: Thenable<any>;
private _buffer: Buffer[] = [];
private _bytesBuffered = 0;
constructor(opts?: WritableOptions) {
super(opts);
this.once('finish', () => this._finish());
}
_write(chunk: any, encoding: string, callback: Function): void {
if (!Buffer.isBuffer(chunk)) {
callback(new Error('data must be a buffer'));
}
if (this._decodeStream) {
// just a forwarder now
this._decodeStream.write(chunk, callback);
return;
}
this._buffer.push(chunk);
this._bytesBuffered += chunk.length;
if (this._decodeStreamConstruction) {
// waiting for the decoder to be ready
this._decodeStreamConstruction.then(_ => callback(), err => callback(err));
} else if (this._bytesBuffered >= options.minBytesRequiredForDetection) {
// buffered enough data, create stream and forward data
this._startDecodeStream(callback);
} else {
// only buffering
callback();
}
}
_startDecodeStream(callback: Function): void {
this._decodeStreamConstruction = TPromise.as(detectEncodingFromBuffer({
buffer: Buffer.concat(this._buffer), bytesRead: this._bytesBuffered
}, options.guessEncoding)).then(detected => {
detected.encoding = options.overwriteEncoding(detected.encoding);
this._decodeStream = decodeStream(detected.encoding);
for (const buffer of this._buffer) {
this._decodeStream.write(buffer);
}
callback();
resolve({ detected, stream: this._decodeStream });
}, err => {
callback(err);
});
}
_finish(): void {
if (this._decodeStream) {
// normal finish
this._decodeStream.end();
} else {
// we were still waiting for data...
this._startDecodeStream(() => this._decodeStream.end());
}
}
});
});
}
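
A minimal usage sketch of toDecodeStream as defined above (the file path is illustrative and the module path is assumed): the raw readable is buffered until enough bytes have arrived for detection, then the resolved stream emits decoded string chunks.

import * as fs from 'fs';
import { toDecodeStream, UTF8 } from 'vs/base/node/encoding'; // module path assumed

const readable = fs.createReadStream('/some/file.txt'); // illustrative path

toDecodeStream(readable, {
	guessEncoding: true,
	overwriteEncoding: detected => detected || UTF8
}).then(({ detected, stream }) => {
	console.log('encoding:', detected.encoding, 'seemsBinary:', detected.seemsBinary);

	// the decode stream emits already-decoded strings
	let content = '';
	stream.on('data', (chunk: string) => content += chunk);
	stream.once('end', () => console.log(`decoded ${content.length} characters`));
});
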
export function bomLength(encoding: string): number {
switch (encoding) {
case UTF8:
@@ -172,6 +261,89 @@ export function toCanonicalName(enc: string): string {
}
}
const ZERO_BYTE_DETECTION_BUFFER_MAX_LEN = 512; // number of bytes to look at to decide about a file being binary or not
const NO_GUESS_BUFFER_MAX_LEN = 512; // when not auto guessing the encoding, small number of bytes are enough
const AUTO_GUESS_BUFFER_MAX_LEN = 512 * 8; // with auto guessing we want a lot more content to be read for guessing
export interface IDetectedEncodingResult {
encoding: string;
seemsBinary: boolean;
}
export interface DetectEncodingOption {
autoGuessEncoding?: boolean;
}
export function detectEncodingFromBuffer(readResult: stream.ReadResult, autoGuessEncoding?: false): IDetectedEncodingResult;
export function detectEncodingFromBuffer(readResult: stream.ReadResult, autoGuessEncoding?: boolean): TPromise<IDetectedEncodingResult>;
export function detectEncodingFromBuffer({ buffer, bytesRead }: stream.ReadResult, autoGuessEncoding?: boolean): TPromise<IDetectedEncodingResult> | IDetectedEncodingResult {
// Always first check for BOM to find out about encoding
let encoding = detectEncodingByBOMFromBuffer(buffer, bytesRead);
// Detect 0 bytes to see if file is binary or UTF-16 LE/BE
// unless we already know that this file has a UTF-16 encoding
let seemsBinary = false;
if (encoding !== UTF16be && encoding !== UTF16le) {
let couldBeUTF16LE = true; // e.g. 0xAA 0x00
let couldBeUTF16BE = true; // e.g. 0x00 0xAA
let containsZeroByte = false;
// This is a simplified guess to detect UTF-16 BE or LE by just checking if
// the first 512 bytes have the 0-byte at a specific location. For UTF-16 LE
// this would be the odd byte index and for UTF-16 BE the even one.
// Note: this can produce false positives (a binary file that uses a 2-byte
// encoding of the same format as UTF-16) and false negatives (a UTF-16 file
// that is using 4 bytes to encode a character).
for (let i = 0; i < bytesRead && i < ZERO_BYTE_DETECTION_BUFFER_MAX_LEN; i++) {
const isEndian = (i % 2 === 1); // assume 2-byte sequences typical for UTF-16
const isZeroByte = (buffer.readInt8(i) === 0);
if (isZeroByte) {
containsZeroByte = true;
}
// UTF-16 LE: expect e.g. 0xAA 0x00
if (couldBeUTF16LE && (isEndian && !isZeroByte || !isEndian && isZeroByte)) {
couldBeUTF16LE = false;
}
// UTF-16 BE: expect e.g. 0x00 0xAA
if (couldBeUTF16BE && (isEndian && isZeroByte || !isEndian && !isZeroByte)) {
couldBeUTF16BE = false;
}
// Return if this is neither UTF16-LE nor UTF16-BE and thus treat as binary
if (isZeroByte && !couldBeUTF16LE && !couldBeUTF16BE) {
break;
}
}
// Handle case of 0-byte included
if (containsZeroByte) {
if (couldBeUTF16LE) {
encoding = UTF16le;
} else if (couldBeUTF16BE) {
encoding = UTF16be;
} else {
seemsBinary = true;
}
}
}
// Auto guess encoding if configured
if (autoGuessEncoding && !seemsBinary && !encoding) {
return guessEncodingByBuffer(buffer.slice(0, bytesRead)).then(encoding => {
return {
seemsBinary: false,
encoding
};
});
}
return { seemsBinary, encoding };
}
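
A small worked example of the zero-byte heuristic above (buffers are contrived, import path assumed, and the last result assumes no BOM was detected): ASCII text in UTF-16 LE has its zero bytes only at odd offsets, UTF-16 BE only at even offsets, and a zero byte that fits neither pattern makes the buffer look binary.

import { detectEncodingFromBuffer } from 'vs/base/node/encoding'; // module path assumed

// 'AB' as UTF-16 LE: 0x41 0x00 0x42 0x00 -> zero bytes at odd offsets only
console.log(detectEncodingFromBuffer({ buffer: Buffer.from([0x41, 0x00, 0x42, 0x00]), bytesRead: 4 }));
// { seemsBinary: false, encoding: 'utf16le' }

// 'AB' as UTF-16 BE: 0x00 0x41 0x00 0x42 -> zero bytes at even offsets only
console.log(detectEncodingFromBuffer({ buffer: Buffer.from([0x00, 0x41, 0x00, 0x42]), bytesRead: 4 }));
// { seemsBinary: false, encoding: 'utf16be' }

// a zero byte that matches neither layout -> treated as binary
console.log(detectEncodingFromBuffer({ buffer: Buffer.from([0x41, 0x42, 0x00, 0x43]), bytesRead: 4 }));
// { seemsBinary: true, encoding: null } (assuming no BOM was found)
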
// https://ss64.com/nt/chcp.html
const windowsTerminalEncodings = {
'437': 'cp437', // United States

View File

@@ -5,7 +5,7 @@
'use strict';
import assert = require('assert');
import * as assert from 'assert';
/**
* Executes the given function (fn) over the given array of items (list) in parallel and returns the resulting errors and results as

View File

@@ -1,106 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import mime = require('vs/base/common/mime');
import { TPromise } from 'vs/base/common/winjs.base';
import stream = require('vs/base/node/stream');
import encoding = require('vs/base/node/encoding');
/**
* Lots of binary file types exist where the type can be determined by matching the first few bytes against some "magic patterns".
* E.g. PDF files always start with %PDF- and the rest of the file contains mostly text, but sometimes binary data (for fonts and images).
* In order to detect these types correctly (and independently from the file's extension), the content base mime type detection must be performed
* on any file, not only on text files.
*
* Here is the original mime type detection in pseudocode:
*
* let mimes = [];
*
* read file extension
*
* if (file extension matches) {
* if (file extension is bogus) {
* // ignore.
* // this covers *.manifest files which can contain arbitrary content, so the extension is of no value.
* // a consequence of this is that the content based mime type becomes the most specific type in the array
* } else {
* mimes.push(associated mime type) // first element: most specific
* }
* }
*
* read file contents
*
* if (content based match found) { // this is independent from text or binary
* mimes.push(associated mime type)
* if (a second mime exists for the match) { // should be rare; text/plain should never be included here
* // e.g. for svg: ['image/svg+xml', 'application/xml']
* mimes.push(second mime)
* }
* }
*
* if (content == text)
* mimes.push('text/plain') // last element: least specific
* else
* mimes.push('application/octet-stream') // last element: least specific
*/
const ZERO_BYTE_DETECTION_BUFFER_MAX_LEN = 512; // number of bytes to look at to decide about a file being binary or not
const NO_GUESS_BUFFER_MAX_LEN = 512; // when not auto guessing the encoding, small number of bytes are enough
const AUTO_GUESS_BUFFER_MAX_LEN = 512 * 8; // with auto guessing we want a lot more content to be read for guessing
export function maxBufferLen(arg1?: DetectMimesOption | boolean): number {
let autoGuessEncoding: boolean;
if (typeof arg1 === 'boolean') {
autoGuessEncoding = arg1;
} else {
autoGuessEncoding = arg1 && arg1.autoGuessEncoding;
}
return autoGuessEncoding ? AUTO_GUESS_BUFFER_MAX_LEN : NO_GUESS_BUFFER_MAX_LEN;
}
export interface IMimeAndEncoding {
encoding: string;
mimes: string[];
}
export interface DetectMimesOption {
autoGuessEncoding?: boolean;
}
export function detectMimeAndEncodingFromBuffer(readResult: stream.ReadResult, autoGuessEncoding?: false): IMimeAndEncoding;
export function detectMimeAndEncodingFromBuffer(readResult: stream.ReadResult, autoGuessEncoding?: boolean): TPromise<IMimeAndEncoding>;
export function detectMimeAndEncodingFromBuffer({ buffer, bytesRead }: stream.ReadResult, autoGuessEncoding?: boolean): TPromise<IMimeAndEncoding> | IMimeAndEncoding {
let enc = encoding.detectEncodingByBOMFromBuffer(buffer, bytesRead);
// Detect 0 bytes to see if file is binary (ignore for UTF 16 though)
let isText = true;
if (enc !== encoding.UTF16be && enc !== encoding.UTF16le) {
for (let i = 0; i < bytesRead && i < ZERO_BYTE_DETECTION_BUFFER_MAX_LEN; i++) {
if (buffer.readInt8(i) === 0) {
isText = false;
break;
}
}
}
if (autoGuessEncoding && isText && !enc) {
return encoding.guessEncodingByBuffer(buffer.slice(0, bytesRead)).then(enc => {
return {
mimes: isText ? [mime.MIME_TEXT] : [mime.MIME_BINARY],
encoding: enc
};
});
}
return {
mimes: isText ? [mime.MIME_TEXT] : [mime.MIME_BINARY],
encoding: enc
};
}

View File

@@ -82,12 +82,6 @@ export function readlink(path: string): TPromise<string> {
return nfcall<string>(fs.readlink, path);
}
export function touch(path: string): TPromise<void> {
const now = Date.now() / 1000; // the value should be a Unix timestamp in seconds
return nfcall(fs.utimes, path, now, now);
}
export function truncate(path: string, len: number): TPromise<void> {
return nfcall(fs.truncate, path, len);
}
@@ -196,3 +190,7 @@ export function whenDeleted(path: string): TPromise<void> {
}, 1000);
});
}
export function copy(source: string, target: string): TPromise<void> {
return nfcall(extfs.copy, source, target);
}

View File

@@ -5,7 +5,7 @@
'use strict';
import net = require('net');
import * as net from 'net';
/**
* @returns Returns a random port between 1025 and 65535.

View File

@@ -4,10 +4,10 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
import path = require('path');
import * as path from 'path';
import * as cp from 'child_process';
import { fork } from 'vs/base/node/stdFork';
import nls = require('vs/nls');
import * as nls from 'vs/nls';
import { PPromise, TPromise, TValueCallback, TProgressCallback, ErrorCallback } from 'vs/base/common/winjs.base';
import * as Types from 'vs/base/common/types';
import { IStringDictionary } from 'vs/base/common/collections';
@@ -153,7 +153,7 @@ export abstract class AbstractProcess<TProgressData> {
public start(): PPromise<SuccessData, TProgressData> {
if (Platform.isWindows && ((this.options && this.options.cwd && TPath.isUNC(this.options.cwd)) || !this.options && !this.options.cwd && TPath.isUNC(process.cwd()))) {
return TPromise.wrapError(new Error(nls.localize('TaskRunner.UNC', 'Can\'t execute a shell command on an UNC drive.')));
return TPromise.wrapError(new Error(nls.localize('TaskRunner.UNC', 'Can\'t execute a shell command on a UNC drive.')));
}
return this.useExec().then((useExec) => {
let cc: TValueCallback<SuccessData>;
@@ -381,7 +381,7 @@ export interface IQueuedSender {
// queue is free again to consume messages.
// On Windows we always wait for the send() method to return before sending the next message
// to work around https://github.com/nodejs/node/issues/7657 (IPC can freeze process)
export function createQueuedSender(childProcess: cp.ChildProcess | NodeJS.Process): IQueuedSender {
export function createQueuedSender(childProcess: cp.ChildProcess): IQueuedSender {
let msgQueue: string[] = [];
let useQueue = false;

View File

@@ -1,183 +0,0 @@
################################################################################################
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the Source EULA. See License.txt in the project root for license information.
################################################################################################
Param(
[string]$ProcessName = "code.exe",
[int]$MaxSamples = 10
)
$processLength = "process(".Length
function Get-MachineInfo {
$model = (Get-WmiObject -Class Win32_Processor).Name
$memory = (Get-WmiObject -Class Win32_PhysicalMemory | Measure-Object -Property Capacity -Sum).Sum / 1MB
$wmi_cs = Get-WmiObject -Class Win32_ComputerSystem
return @{
"type" = "machineInfo"
"model" = $model
"processors" = $wmi_cs.NumberOfProcessors
"logicalProcessors" = $wmi_cs.NumberOfLogicalProcessors
"totalMemory" = $memory
}
}
$machineInfo = Get-MachineInfo
function Get-MachineState {
$proc = Get-WmiObject Win32_Processor
$os = Get-WmiObject win32_OperatingSystem
return @{
"type" = 'machineState'
"cpuLoad" = $proc.LoadPercentage
"handles" = (Get-Process | Measure-Object Handles -Sum).Sum
"memory" = @{
"total" = $os.TotalVisibleMemorySize
"free" = $os.FreePhysicalMemory
"swapTotal" = $os.TotalVirtualMemorySize
"swapFree" = $os.FreeVirtualMemory
}
}
}
$machineState = Get-MachineState
$processId2CpuLoad = @{}
function Get-PerformanceCounters ($logicalProcessors) {
$counterError
# In a first round we get the performance counters and the process ids.
$counters = (Get-Counter ("\Process(*)\% Processor Time", "\Process(*)\ID Process") -ErrorAction SilentlyContinue).CounterSamples
$processKey2Id = @{}
foreach ($counter in $counters) {
if ($counter.Status -ne 0) {
continue
}
$path = $counter.path;
$segments = $path.Split("\");
$kind = $segments[4];
$processKey = $segments[3].Substring($processLength, $segments[3].Length - $processLength - 1)
if ($kind -eq "id process") {
$processKey2Id[$processKey] = [uint32]$counter.CookedValue
}
}
foreach ($counter in $counters) {
if ($counter.Status -ne 0) {
continue
}
$path = $counter.path;
$segments = $path.Split("\");
$kind = $segments[4];
$processKey = $segments[3].Substring($processLength, $segments[3].Length - $processLength - 1)
if ($kind -eq "% processor time") {
$array = New-Object double[] ($MaxSamples + 1)
$array[0] = ($counter.CookedValue / $logicalProcessors)
$processId = $processKey2Id[$processKey]
if ($processId) {
$processId2CpuLoad[$processId] = $array
}
}
}
# Now let's sample another 10 times, but only the processor time
$samples = Get-Counter "\Process(*)\% Processor Time" -SampleInterval 1 -MaxSamples $MaxSamples -ErrorAction SilentlyContinue
for ($s = 0; $s -lt $samples.Count; $s++) {
$counters = $samples[$s].CounterSamples;
foreach ($counter in $counters) {
if ($counter.Status -ne 0) {
continue
}
$path = $counter.path;
$segments = $path.Split("\");
$processKey = $segments[3].Substring($processLength, $segments[3].Length - $processLength - 1)
$processKey = $processKey2Id[$processKey];
if ($processKey) {
$processId2CpuLoad[$processKey][$s + 1] = ($counter.CookedValue / $logicalProcessors)
}
}
}
}
Get-PerformanceCounters -logicalProcessors $machineInfo.logicalProcessors
$topElements = New-Object PSObject[] $processId2CpuLoad.Keys.Count;
$index = 0;
foreach ($key in $processId2CpuLoad.Keys) {
$obj = [PSCustomObject]@{
ProcessId = $key
Load = ($processId2CpuLoad[$key] | Measure-Object -Sum).Sum / ($MaxSamples + 1)
}
$topElements[$index] = $obj
$index++
}
$topElements = $topElements | Sort-Object Load -Descending
# Get all code processes
$codeProcesses = @{}
foreach ($item in Get-WmiObject Win32_Process -Filter "name = '$ProcessName'") {
$codeProcesses[$item.ProcessId] = $item
}
foreach ($item in Get-WmiObject Win32_Process -Filter "name = 'codeHelper.exe'") {
$codeProcesses[$item.ProcessId] = $item
}
$otherProcesses = @{}
foreach ($item in Get-WmiObject Win32_Process -Filter "name Like '%'") {
if (!($codeProcesses.Contains($item.ProcessId))) {
$otherProcesses[$item.ProcessId] = $item
}
}
$modified = $false
do {
$toDelete = @()
$modified = $false
foreach ($item in $otherProcesses.Values) {
if ($codeProcesses.Contains([uint32]$item.ParentProcessId)) {
$codeProcesses[$item.ProcessId] = $item;
$toDelete += $item
}
}
foreach ($item in $toDelete) {
$otherProcesses.Remove([uint32]$item.ProcessId)
$modified = $true
}
} while ($modified)
$result = New-Object PSObject[] (2 + [math]::Min(5, $topElements.Count) + $codeProcesses.Count)
$result[0] = $machineInfo
$result[1] = $machineState
$index = 2;
for($i = 0; $i -lt 5 -and $i -lt $topElements.Count; $i++) {
$element = $topElements[$i]
$item = $codeProcesses[[uint32]$element.ProcessId]
if (!$item) {
$item = $otherProcesses[[uint32]$element.ProcessId]
}
if ($item) {
$cpuLoad = $processId2CpuLoad[[uint32]$item.ProcessId] | % { [pscustomobject] $_ }
$result[$index] = [pscustomobject]@{
"type" = "topProcess"
"name" = $item.Name
"processId" = $item.ProcessId
"parentProcessId" = $item.ParentProcessId
"commandLine" = $item.CommandLine
"handles" = $item.HandleCount
"cpuLoad" = $cpuLoad
"workingSetSize" = $item.WorkingSetSize
}
$index++
}
}
foreach ($item in $codeProcesses.Values) {
# we need to convert this, otherwise ConvertTo-Json creates a value/count object and not an inline array
$cpuLoad = $processId2CpuLoad[[uint32]$item.ProcessId] | % { [pscustomobject] $_ }
$result[$index] = [pscustomobject]@{
"type" = "processInfo"
"name" = $item.Name
"processId" = $item.ProcessId
"parentProcessId" = $item.ParentProcessId
"commandLine" = $item.CommandLine
"handles" = $item.HandleCount
"cpuLoad" = $cpuLoad
"workingSetSize" = $item.WorkingSetSize
}
$index++
}
$result | ConvertTo-Json -Depth 99

View File

@@ -5,10 +5,7 @@
'use strict';
import { spawn, exec } from 'child_process';
import * as path from 'path';
import * as nls from 'vs/nls';
import URI from 'vs/base/common/uri';
import { exec } from 'child_process';
export interface ProcessItem {
name: string;
@@ -121,32 +118,6 @@ export function listProcesses(rootPid: number): Promise<ProcessItem> {
if (process.platform === 'win32') {
console.log(nls.localize('collecting', 'Collecting CPU and memory information. This might take a couple of seconds.'));
interface ProcessInfo {
type: 'processInfo';
name: string;
processId: number;
parentProcessId: number;
commandLine: string;
handles: number;
cpuLoad: number[];
workingSetSize: number;
}
interface TopProcess {
type: 'topProcess';
name: string;
processId: number;
parentProcessId: number;
commandLine: string;
handles: number;
cpuLoad: number[];
workingSetSize: number;
}
type Item = ProcessInfo | TopProcess;
const cleanUNCPrefix = (value: string): string => {
if (value.indexOf('\\\\?\\') === 0) {
return value.substr(4);
@@ -161,75 +132,45 @@ export function listProcesses(rootPid: number): Promise<ProcessItem> {
}
};
const execMain = path.basename(process.execPath);
const script = URI.parse(require.toUrl('vs/base/node/ps-win.ps1')).fsPath;
const commandLine = `& {& '${script}' -ProcessName '${execMain}' -MaxSamples 3}`;
const cmd = spawn('powershell.exe', ['-NoProfile', '-ExecutionPolicy', 'Bypass', '-Command', commandLine]);
let stdout = '';
let stderr = '';
cmd.stdout.on('data', data => {
stdout += data.toString();
});
cmd.stderr.on('data', data => {
stderr += data.toString();
});
cmd.on('exit', () => {
if (stderr.length > 0) {
reject(new Error(stderr));
return;
}
let processItems: Map<number, ProcessItem> = new Map();
try {
const items: Item[] = JSON.parse(stdout);
for (const item of items) {
if (item.type === 'processInfo') {
let load = 0;
if (item.cpuLoad) {
for (let value of item.cpuLoad) {
load += value;
}
load = load / item.cpuLoad.length;
} else {
load = -1;
}
let commandLine = cleanUNCPrefix(item.commandLine);
processItems.set(item.processId, {
(import('windows-process-tree')).then(windowsProcessTree => {
windowsProcessTree.getProcessList(rootPid, (processList) => {
windowsProcessTree.getProcessCpuUsage(processList, (completeProcessList) => {
const processItems: Map<number, ProcessItem> = new Map();
completeProcessList.forEach(process => {
const commandLine = cleanUNCPrefix(process.commandLine);
processItems.set(process.pid, {
name: findName(commandLine),
cmd: commandLine,
pid: item.processId,
ppid: item.parentProcessId,
load: load,
mem: item.workingSetSize
pid: process.pid,
ppid: process.ppid,
load: process.cpu,
mem: process.memory
});
}
}
rootItem = processItems.get(rootPid);
if (rootItem) {
processItems.forEach(item => {
let parent = processItems.get(item.ppid);
if (parent) {
if (!parent.children) {
parent.children = [];
});
rootItem = processItems.get(rootPid);
if (rootItem) {
processItems.forEach(item => {
let parent = processItems.get(item.ppid);
if (parent) {
if (!parent.children) {
parent.children = [];
}
parent.children.push(item);
}
parent.children.push(item);
}
});
processItems.forEach(item => {
if (item.children) {
item.children = item.children.sort((a, b) => a.pid - b.pid);
}
});
resolve(rootItem);
} else {
reject(new Error(`Root process ${rootPid} not found`));
}
} catch (error) {
console.log(stdout);
reject(error);
}
});
processItems.forEach(item => {
if (item.children) {
item.children = item.children.sort((a, b) => a.pid - b.pid);
}
});
resolve(rootItem);
} else {
reject(new Error(`Root process ${rootPid} not found`));
}
});
}, windowsProcessTree.ProcessDataFlag.CommandLine | windowsProcessTree.ProcessDataFlag.Memory);
});
} else { // OS X & Linux
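
A minimal usage sketch of listProcesses as declared above (the printing helper and module path are assumptions; the ProcessItem fields are the ones populated in the code above): the resolved ProcessItem is a tree rooted at rootPid, with children sorted by pid.

import { listProcesses, ProcessItem } from 'vs/base/node/ps'; // module path assumed

// Illustrative only: print the process tree rooted at the current process.
function printTree(item: ProcessItem, indent: number = 0): void {
	console.log(`${' '.repeat(indent)}${item.pid} ${item.name} (load: ${item.load}, mem: ${item.mem})`);
	(item.children || []).forEach(child => printTree(child, indent + 2));
}

listProcesses(process.pid).then(printTree, err => console.error(err));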

View File

@@ -7,8 +7,8 @@
import { TPromise } from 'vs/base/common/winjs.base';
import { isBoolean, isNumber } from 'vs/base/common/types';
import https = require('https');
import http = require('http');
import * as https from 'https';
import * as http from 'http';
import { Stream } from 'stream';
import { parse as parseUrl } from 'url';
import { createWriteStream } from 'fs';
@@ -96,7 +96,7 @@ export function request(options: IRequestOptions): TPromise<IRequestContext> {
stream = stream.pipe(createGunzip());
}
c({ res, stream });
c({ res, stream } as IRequestContext);
}
});

View File

@@ -5,10 +5,10 @@
'use strict';
import path = require('path');
import os = require('os');
import net = require('net');
import cp = require('child_process');
import * as path from 'path';
import * as os from 'os';
import * as net from 'net';
import * as cp from 'child_process';
import uri from 'vs/base/common/uri';
export interface IForkOpts {

View File

@@ -5,7 +5,7 @@
'use strict';
import fs = require('fs');
import * as fs from 'fs';
import { TPromise } from 'vs/base/common/winjs.base';

View File

@@ -3,14 +3,15 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import nls = require('vs/nls');
import * as nls from 'vs/nls';
import * as path from 'path';
import { createWriteStream } from 'fs';
import { createWriteStream, WriteStream } from 'fs';
import { Readable } from 'stream';
import { nfcall, ninvoke, SimpleThrottler } from 'vs/base/common/async';
import { mkdirp, rimraf } from 'vs/base/node/pfs';
import { TPromise } from 'vs/base/common/winjs.base';
import { open as _openZip, Entry, ZipFile } from 'yauzl';
import { ILogService } from 'vs/platform/log/common/log';
export interface IExtractOptions {
overwrite?: boolean;
@@ -26,10 +27,7 @@ interface IOptions {
sourcePathRegex: RegExp;
}
export enum ExtractErrorType {
Undefined,
CorruptZip
}
export type ExtractErrorType = 'CorruptZip' | 'Incomplete';
export class ExtractError extends Error {
@@ -40,7 +38,7 @@ export class ExtractError extends Error {
let message = cause.message;
switch (type) {
case ExtractErrorType.CorruptZip: message = `Corrupt ZIP: ${message}`; break;
case 'CorruptZip': message = `Corrupt ZIP: ${message}`; break;
}
super(message);
@@ -58,12 +56,14 @@ function modeFromEntry(entry: Entry) {
}
function toExtractError(err: Error): ExtractError {
let type = ExtractErrorType.CorruptZip;
if (err instanceof ExtractError) {
return err;
}
console.log('WHAT');
let type: ExtractErrorType = void 0;
if (/end of central directory record signature not found/.test(err.message)) {
type = ExtractErrorType.CorruptZip;
type = 'CorruptZip';
}
return new ExtractError(type, err);
@@ -74,24 +74,51 @@ function extractEntry(stream: Readable, fileName: string, mode: number, targetPa
const targetDirName = path.join(targetPath, dirName);
const targetFileName = path.join(targetPath, fileName);
let istream: WriteStream;
return mkdirp(targetDirName).then(() => new TPromise((c, e) => {
let istream = createWriteStream(targetFileName, { mode });
istream.once('finish', () => c(null));
istream = createWriteStream(targetFileName, { mode });
istream.once('close', () => c(null));
istream.once('error', e);
stream.once('error', e);
stream.pipe(istream);
}, () => {
if (istream) {
istream.close();
}
}));
}
function extractZip(zipfile: ZipFile, targetPath: string, options: IOptions): TPromise<void> {
function extractZip(zipfile: ZipFile, targetPath: string, options: IOptions, logService: ILogService): TPromise<void> {
let isCanceled = false;
let last = TPromise.wrap<any>(null);
let extractedEntriesCount = 0;
return new TPromise((c, e) => {
const throttler = new SimpleThrottler();
let last = TPromise.as<any>(null);
const readNextEntry = () => {
extractedEntriesCount++;
zipfile.readEntry();
};
zipfile.once('error', e);
zipfile.once('close', () => last.then(c, e));
zipfile.once('close', () => last.then(() => {
if (isCanceled || zipfile.entryCount === extractedEntriesCount) {
c(null);
} else {
e(new ExtractError('Incomplete', new Error(nls.localize('incompleteExtract', "Incomplete. Found {0} of {1} entries", extractedEntriesCount, zipfile.entryCount))));
}
}, e));
zipfile.readEntry();
zipfile.on('entry', (entry: Entry) => {
logService.debug(targetPath, 'Found', entry.fileName);
if (isCanceled) {
return;
}
if (!options.sourcePathRegex.test(entry.fileName)) {
readNextEntry();
return;
}
@@ -100,33 +127,38 @@ function extractZip(zipfile: ZipFile, targetPath: string, options: IOptions): TP
// directory file names end with '/'
if (/\/$/.test(fileName)) {
const targetFileName = path.join(targetPath, fileName);
last = mkdirp(targetFileName);
last = mkdirp(targetFileName).then(() => readNextEntry());
return;
}
const stream = ninvoke(zipfile, zipfile.openReadStream, entry);
const mode = modeFromEntry(entry);
last = throttler.queue(() => stream.then(stream => extractEntry(stream, fileName, mode, targetPath, options)));
last = throttler.queue(() => stream.then(stream => extractEntry(stream, fileName, mode, targetPath, options).then(() => readNextEntry())));
});
}, () => {
logService.debug(targetPath, 'Cancelled.');
isCanceled = true;
last.cancel();
zipfile.close();
}).then(null, err => TPromise.wrapError(toExtractError(err)));
}
function openZip(zipFile: string): TPromise<ZipFile> {
return nfcall<ZipFile>(_openZip, zipFile)
function openZip(zipFile: string, lazy: boolean = false): TPromise<ZipFile> {
return nfcall<ZipFile>(_openZip, zipFile, lazy ? { lazyEntries: true } : void 0)
.then(null, err => TPromise.wrapError(toExtractError(err)));
}
export function extract(zipPath: string, targetPath: string, options: IExtractOptions = {}): TPromise<void> {
export function extract(zipPath: string, targetPath: string, options: IExtractOptions = {}, logService: ILogService): TPromise<void> {
const sourcePathRegex = new RegExp(options.sourcePath ? `^${options.sourcePath}` : '');
let promise = openZip(zipPath);
let promise = openZip(zipPath, true);
if (options.overwrite) {
promise = promise.then(zipfile => rimraf(targetPath).then(() => zipfile));
}
return promise.then(zipfile => extractZip(zipfile, targetPath, { sourcePathRegex }));
return promise.then(zipfile => extractZip(zipfile, targetPath, { sourcePathRegex }, logService));
}
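
A minimal usage sketch of the updated extract signature (paths and module paths are illustrative, and an ILogService instance is assumed to be available): with lazy entry reading, entries are counted as they are extracted and the promise rejects with an 'Incomplete' ExtractError when the count falls short of zipfile.entryCount.

import { extract } from 'vs/base/node/zip'; // module path assumed
import { ILogService } from 'vs/platform/log/common/log';

declare const logService: ILogService; // assumed to be injected elsewhere

// Illustrative only: unpack the 'extension/' folder of an archive into a target directory.
extract('/tmp/example.zip', '/tmp/out', { sourcePath: 'extension/', overwrite: true }, logService).then(
	() => logService.debug('/tmp/out', 'Extraction finished.'),
	err => logService.debug('/tmp/out', 'Extraction failed', err)
);
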
function read(zipPath: string, filePath: string): TPromise<Readable> {