mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-16 10:58:30 -05:00
Merge VS Code 1.21 source code (#1067)
* Initial VS Code 1.21 file copy with patches
* A few more merges
* Post npm install
* Fix batch of build breaks
* Fix more build breaks
* Fix more build errors
* Fix more build breaks
* Runtime fixes 1
* Get connection dialog working with some todos
* Fix a few packaging issues
* Copy several node_modules to package build to fix loader issues
* Fix breaks from master
* A few more fixes
* Make tests pass
* First pass of license header updates
* Second pass of license header updates
* Fix restore dialog issues
* Remove add additional themes menu items
* Fix select box issues where the list doesn't show up
* Formatting
* Fix editor dispose issue
* Copy over node modules to correct location on all platforms
118	build/lib/asar.js	Normal file
@@ -0,0 +1,118 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var es = require("event-stream");
var pickle = require("chromium-pickle-js");
var Filesystem = require("asar/lib/filesystem");
var VinylFile = require("vinyl");
var minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) {
    var shouldUnpackFile = function (file) {
        for (var i = 0; i < unpackGlobs.length; i++) {
            if (minimatch(file.relative, unpackGlobs[i])) {
                return true;
            }
        }
        return false;
    };
    var filesystem = new Filesystem(folderPath);
    var out = [];
    // Keep track of pending inserts
    var pendingInserts = 0;
    var onFileInserted = function () { pendingInserts--; };
    // Do not insert the same directory twice
    var seenDir = {};
    var insertDirectoryRecursive = function (dir) {
        if (seenDir[dir]) {
            return;
        }
        var lastSlash = dir.lastIndexOf('/');
        if (lastSlash === -1) {
            lastSlash = dir.lastIndexOf('\\');
        }
        if (lastSlash !== -1) {
            insertDirectoryRecursive(dir.substring(0, lastSlash));
        }
        seenDir[dir] = true;
        filesystem.insertDirectory(dir);
    };
    var insertDirectoryForFile = function (file) {
        var lastSlash = file.lastIndexOf('/');
        if (lastSlash === -1) {
            lastSlash = file.lastIndexOf('\\');
        }
        if (lastSlash !== -1) {
            insertDirectoryRecursive(file.substring(0, lastSlash));
        }
    };
    var insertFile = function (relativePath, stat, shouldUnpack) {
        insertDirectoryForFile(relativePath);
        pendingInserts++;
        filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
    };
    return es.through(function (file) {
        if (file.stat.isDirectory()) {
            return;
        }
        if (!file.stat.isFile()) {
            throw new Error("unknown item in stream!");
        }
        var shouldUnpack = shouldUnpackFile(file);
        insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
        if (shouldUnpack) {
            // The file goes outside of xx.asar, in a folder xx.asar.unpacked
            var relative = path.relative(folderPath, file.path);
            this.queue(new VinylFile({
                cwd: folderPath,
                base: folderPath,
                path: path.join(destFilename + '.unpacked', relative),
                stat: file.stat,
                contents: file.contents
            }));
        }
        else {
            // The file goes inside of xx.asar
            out.push(file.contents);
        }
    }, function () {
        var _this = this;
        var finish = function () {
            {
                var headerPickle = pickle.createEmpty();
                headerPickle.writeString(JSON.stringify(filesystem.header));
                var headerBuf = headerPickle.toBuffer();
                var sizePickle = pickle.createEmpty();
                sizePickle.writeUInt32(headerBuf.length);
                var sizeBuf = sizePickle.toBuffer();
                out.unshift(headerBuf);
                out.unshift(sizeBuf);
            }
            var contents = Buffer.concat(out);
            out.length = 0;
            _this.queue(new VinylFile({
                cwd: folderPath,
                base: folderPath,
                path: destFilename,
                contents: contents
            }));
            _this.queue(null);
        };
        // Call finish() only when all file inserts have finished...
        if (pendingInserts === 0) {
            finish();
        }
        else {
            onFileInserted = function () {
                pendingInserts--;
                if (pendingInserts === 0) {
                    finish();
                }
            };
        }
    });
}
exports.createAsar = createAsar;
131	build/lib/asar.ts	Normal file
@@ -0,0 +1,131 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as path from 'path';
import * as es from 'event-stream';
import * as pickle from 'chromium-pickle-js';
import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';

export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {

	const shouldUnpackFile = (file: VinylFile): boolean => {
		for (let i = 0; i < unpackGlobs.length; i++) {
			if (minimatch(file.relative, unpackGlobs[i])) {
				return true;
			}
		}
		return false;
	};

	const filesystem = new Filesystem(folderPath);
	const out: Buffer[] = [];

	// Keep track of pending inserts
	let pendingInserts = 0;
	let onFileInserted = () => { pendingInserts--; };

	// Do not insert the same directory twice
	const seenDir: { [key: string]: boolean; } = {};
	const insertDirectoryRecursive = (dir: string) => {
		if (seenDir[dir]) {
			return;
		}

		let lastSlash = dir.lastIndexOf('/');
		if (lastSlash === -1) {
			lastSlash = dir.lastIndexOf('\\');
		}
		if (lastSlash !== -1) {
			insertDirectoryRecursive(dir.substring(0, lastSlash));
		}
		seenDir[dir] = true;
		filesystem.insertDirectory(dir);
	};

	const insertDirectoryForFile = (file: string) => {
		let lastSlash = file.lastIndexOf('/');
		if (lastSlash === -1) {
			lastSlash = file.lastIndexOf('\\');
		}
		if (lastSlash !== -1) {
			insertDirectoryRecursive(file.substring(0, lastSlash));
		}
	};

	const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
		insertDirectoryForFile(relativePath);
		pendingInserts++;
		filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
	};

	return es.through(function (file) {
		if (file.stat.isDirectory()) {
			return;
		}
		if (!file.stat.isFile()) {
			throw new Error(`unknown item in stream!`);
		}
		const shouldUnpack = shouldUnpackFile(file);
		insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);

		if (shouldUnpack) {
			// The file goes outside of xx.asar, in a folder xx.asar.unpacked
			const relative = path.relative(folderPath, file.path);
			this.queue(new VinylFile({
				cwd: folderPath,
				base: folderPath,
				path: path.join(destFilename + '.unpacked', relative),
				stat: file.stat,
				contents: file.contents
			}));
		} else {
			// The file goes inside of xx.asar
			out.push(file.contents);
		}
	}, function () {

		let finish = () => {
			{
				const headerPickle = pickle.createEmpty();
				headerPickle.writeString(JSON.stringify(filesystem.header));
				const headerBuf = headerPickle.toBuffer();

				const sizePickle = pickle.createEmpty();
				sizePickle.writeUInt32(headerBuf.length);
				const sizeBuf = sizePickle.toBuffer();

				out.unshift(headerBuf);
				out.unshift(sizeBuf);
			}

			const contents = Buffer.concat(out);
			out.length = 0;

			this.queue(new VinylFile({
				cwd: folderPath,
				base: folderPath,
				path: destFilename,
				contents: contents
			}));
			this.queue(null);
		};

		// Call finish() only when all file inserts have finished...
		if (pendingInserts === 0) {
			finish();
		} else {
			onFileInserted = () => {
				pendingInserts--;
				if (pendingInserts === 0) {
					finish();
				}
			};
		}
	});
}
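For orientation, `createAsar` is designed to sit at the end of a gulp stream of vinyl files: the first callback collects each file's contents (or re-routes it to the `.unpacked` folder), and the flush callback prepends the pickled header before emitting the archive. A minimal sketch of how it could be wired up; the task name, globs, and destination below are illustrative assumptions, not part of this commit:

```ts
import * as gulp from 'gulp';
import { createAsar } from './lib/asar';

// Hypothetical task: pack the built node_modules tree into node_modules.asar.
// Files matching the unpack globs (e.g. native *.node binaries, which cannot
// be loaded from inside an archive) are emitted beside the archive in
// node_modules.asar.unpacked instead.
gulp.task('pack-node-modules', () => {
	return gulp.src('out-build/node_modules/**', { base: 'out-build' })
		.pipe(createAsar('out-build', ['**/*.node'], 'node_modules.asar'))
		.pipe(gulp.dest('.build'));
});
```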
122	build/lib/builtInExtensions.js	Normal file
@@ -0,0 +1,122 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

const fs = require('fs');
const path = require('path');
const os = require('os');
const mkdirp = require('mkdirp');
const rimraf = require('rimraf');
const es = require('event-stream');
const rename = require('gulp-rename');
const vfs = require('vinyl-fs');
const ext = require('./extensions');
const util = require('gulp-util');

const root = path.dirname(path.dirname(__dirname));
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');

function getExtensionPath(extension) {
	return path.join(root, '.build', 'builtInExtensions', extension.name);
}

function isUpToDate(extension) {
	const packagePath = path.join(getExtensionPath(extension), 'package.json');

	if (!fs.existsSync(packagePath)) {
		return false;
	}

	const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });

	try {
		const diskVersion = JSON.parse(packageContents).version;
		return (diskVersion === extension.version);
	} catch (err) {
		return false;
	}
}

function syncMarketplaceExtension(extension) {
	if (isUpToDate(extension)) {
		util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
		return es.readArray([]);
	}

	rimraf.sync(getExtensionPath(extension));

	return ext.fromMarketplace(extension.name, extension.version)
		.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
		.pipe(vfs.dest('.build/builtInExtensions'))
		.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
}

function syncExtension(extension, controlState) {
	switch (controlState) {
		case 'disabled':
			util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
			return es.readArray([]);

		case 'marketplace':
			return syncMarketplaceExtension(extension);

		default:
			if (!fs.existsSync(controlState)) {
				util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
				return es.readArray([]);

			} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
				util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
				return es.readArray([]);
			}

			util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
			return es.readArray([]);
	}
}

function readControlFile() {
	try {
		return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
	} catch (err) {
		return {};
	}
}

function writeControlFile(control) {
	mkdirp.sync(path.dirname(controlFilePath));
	fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
}

function main() {
	util.log('Synchronizing built-in extensions...');
	util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);

	const control = readControlFile();
	const streams = [];

	for (const extension of builtInExtensions) {
		let controlState = control[extension.name] || 'marketplace';
		control[extension.name] = controlState;

		streams.push(syncExtension(extension, controlState));
	}

	writeControlFile(control);

	es.merge(streams)
		.on('error', err => {
			console.error(err);
			process.exit(1);
		})
		.on('end', () => {
			process.exit(0);
		});
}

main();
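For reference, the `control.json` consumed above maps each built-in extension name to one of three states. A hypothetical example, expressed as the object shape `syncExtension` expects (the extension names and the local path are illustrative):

```ts
// Shape of ~/.vscode-oss-dev/extensions/control.json:
// 'marketplace' downloads the pinned version, 'disabled' skips the extension,
// and any other string is treated as a local directory to run it from.
const exampleControl: { [extensionName: string]: string } = {
	'ms-vscode.node-debug': 'marketplace',
	'ms-vscode.node-debug2': 'disabled',
	'some.local-extension': '/home/me/src/some-local-extension'
};
```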
build/lib/bundle.js
@@ -217,6 +217,7 @@ function removeDuplicateTSBoilerplate(destFiles) {
     { start: /^var __metadata/, end: /^};$/ },
     { start: /^var __param/, end: /^};$/ },
     { start: /^var __awaiter/, end: /^};$/ },
+    { start: /^var __generator/, end: /^};$/ },
 ];
 destFiles.forEach(function (destFile) {
     var SEEN_BOILERPLATE = [];
build/lib/bundle.ts
@@ -44,11 +44,11 @@ interface ILoaderPluginReqFunc {

 export interface IEntryPoint {
 	name: string;
-	include: string[];
-	exclude: string[];
+	include?: string[];
+	exclude?: string[];
 	prepend: string[];
-	append: string[];
-	dest: string;
+	append?: string[];
+	dest?: string;
 }

 interface IEntryPointMap {
@@ -339,6 +339,7 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
 		{ start: /^var __metadata/, end: /^};$/ },
 		{ start: /^var __param/, end: /^};$/ },
 		{ start: /^var __awaiter/, end: /^};$/ },
+		{ start: /^var __generator/, end: /^};$/ },
 	];

 	destFiles.forEach((destFile) => {
build/lib/compilation.js
@@ -22,6 +22,9 @@ var rootDir = path.join(__dirname, '../../src');
 var options = require('../../src/tsconfig.json').compilerOptions;
 options.verbose = false;
 options.sourceMap = true;
+if (process.env['VSCODE_NO_SOURCEMAP']) {
+    options.sourceMap = false;
+}
 options.rootDir = rootDir;
 options.sourceRoot = util.toFileUri(rootDir);
 function createCompile(build, emitError) {
@@ -58,9 +61,13 @@ function compileTask(out, build) {
     return function () {
         var compile = createCompile(build, true);
         var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
+        // Do not write .d.ts files to disk, as they are not needed there.
+        var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
         return src
             .pipe(compile())
+            .pipe(dtsFilter)
             .pipe(gulp.dest(out))
+            .pipe(dtsFilter.restore)
             .pipe(monacodtsTask(out, false));
     };
 }
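The `dtsFilter` added above (and in the watch task that follows) uses a filter/restore idiom: files are removed from the stream for one pipeline stage and re-injected afterwards, so `.d.ts` files skip `gulp.dest` but still reach `monacodtsTask`. A minimal standalone sketch of the same pattern using the gulp-filter package (the build's own `util.filter` provides the same filter/`restore` contract; the globs here are illustrative):

```ts
import * as gulp from 'gulp';
import * as filter from 'gulp-filter';

// Keep .d.ts files out of the dest() step, then bring them back so a later
// stage can still see them.
const noDts = filter(['**', '!**/*.d.ts'], { restore: true });

gulp.src('src/**/*.ts')
	.pipe(noDts)            // .d.ts files leave the stream here...
	.pipe(gulp.dest('out')) // ...so they are never written to disk...
	.pipe(noDts.restore);   // ...and rejoin the stream here.
```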
@@ -70,54 +77,19 @@ function watchTask(out, build) {
     var compile = createCompile(build);
     var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
     var watchSrc = watch('src/**', { base: 'src' });
+    // Do not write .d.ts files to disk, as they are not needed there.
+    var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
     return watchSrc
         .pipe(util.incremental(compile, src, true))
+        .pipe(dtsFilter)
         .pipe(gulp.dest(out))
+        .pipe(dtsFilter.restore)
         .pipe(monacodtsTask(out, true));
     };
 }
 exports.watchTask = watchTask;
-function reloadTypeScriptNodeModule() {
-    var util = require('gulp-util');
-    function log(message) {
-        var rest = [];
-        for (var _i = 1; _i < arguments.length; _i++) {
-            rest[_i - 1] = arguments[_i];
-        }
-        util.log.apply(util, [util.colors.cyan('[memory watch dog]'), message].concat(rest));
-    }
-    function heapUsed() {
-        return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
-    }
-    return es.through(function (data) {
-        this.emit('data', data);
-    }, function () {
-        log('memory usage after compilation finished: ' + heapUsed());
-        // It appears we are running into some variant of
-        // https://bugs.chromium.org/p/v8/issues/detail?id=2073
-        //
-        // Even though all references are dropped, some
-        // optimized methods in the TS compiler end up holding references
-        // to the entire TypeScript language host (>600MB)
-        //
-        // The idea is to force v8 to drop references to these
-        // optimized methods, by "reloading" the typescript node module
-        log('Reloading typescript node module...');
-        var resolvedName = require.resolve('typescript');
-        var originalModule = require.cache[resolvedName];
-        delete require.cache[resolvedName];
-        var newExports = require('typescript');
-        require.cache[resolvedName] = originalModule;
-        for (var prop in newExports) {
-            if (newExports.hasOwnProperty(prop)) {
-                originalModule.exports[prop] = newExports[prop];
-            }
-        }
-        log('typescript node module reloaded.');
-        this.emit('end');
-    });
-}
 function monacodtsTask(out, isWatch) {
     var basePath = path.resolve(process.cwd(), out);
     var neededFiles = {};
     monacodts.getFilesToWatch(out).forEach(function (filePath) {
         filePath = path.normalize(filePath);
@@ -160,7 +132,7 @@ function monacodtsTask(out, isWatch) {
         }));
     }
     resultStream = es.through(function (data) {
-        var filePath = path.normalize(data.path);
+        var filePath = path.normalize(path.resolve(basePath, data.relative));
         if (neededFiles[filePath]) {
             setInputFile(filePath, data.contents.toString());
         }
build/lib/compilation.ts
@@ -25,6 +25,9 @@ const rootDir = path.join(__dirname, '../../src');
 const options = require('../../src/tsconfig.json').compilerOptions;
 options.verbose = false;
 options.sourceMap = true;
+if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
+	options.sourceMap = false;
+}
 options.rootDir = rootDir;
 options.sourceRoot = util.toFileUri(rootDir);

@@ -49,7 +52,6 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
 		.pipe(tsFilter)
 		.pipe(util.loadSourcemaps())
 		.pipe(ts(token))
-		// .pipe(build ? reloadTypeScriptNodeModule() : es.through())
 		.pipe(noDeclarationsFilter)
 		.pipe(build ? nls() : es.through())
 		.pipe(noDeclarationsFilter.restore)
@@ -75,9 +77,14 @@ export function compileTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
 		gulp.src('node_modules/typescript/lib/lib.d.ts'),
 	);

+	// Do not write .d.ts files to disk, as they are not needed there.
+	const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
+
 	return src
 		.pipe(compile())
+		.pipe(dtsFilter)
 		.pipe(gulp.dest(out))
+		.pipe(dtsFilter.restore)
 		.pipe(monacodtsTask(out, false));
 	};
 }
@@ -93,62 +100,22 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
 	);
 	const watchSrc = watch('src/**', { base: 'src' });

+	// Do not write .d.ts files to disk, as they are not needed there.
+	const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
+
 	return watchSrc
 		.pipe(util.incremental(compile, src, true))
+		.pipe(dtsFilter)
 		.pipe(gulp.dest(out))
+		.pipe(dtsFilter.restore)
 		.pipe(monacodtsTask(out, true));
 	};
 }

-function reloadTypeScriptNodeModule(): NodeJS.ReadWriteStream {
-	var util = require('gulp-util');
-	function log(message: any, ...rest: any[]): void {
-		util.log(util.colors.cyan('[memory watch dog]'), message, ...rest);
-	}
-
-	function heapUsed(): string {
-		return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
-	}
-
-	return es.through(function (data) {
-		this.emit('data', data);
-	}, function () {
-
-		log('memory usage after compilation finished: ' + heapUsed());
-
-		// It appears we are running into some variant of
-		// https://bugs.chromium.org/p/v8/issues/detail?id=2073
-		//
-		// Even though all references are dropped, some
-		// optimized methods in the TS compiler end up holding references
-		// to the entire TypeScript language host (>600MB)
-		//
-		// The idea is to force v8 to drop references to these
-		// optimized methods, by "reloading" the typescript node module
-
-		log('Reloading typescript node module...');
-
-		var resolvedName = require.resolve('typescript');
-
-		var originalModule = require.cache[resolvedName];
-		delete require.cache[resolvedName];
-		var newExports = require('typescript');
-		require.cache[resolvedName] = originalModule;
-
-		for (var prop in newExports) {
-			if (newExports.hasOwnProperty(prop)) {
-				originalModule.exports[prop] = newExports[prop];
-			}
-		}
-
-		log('typescript node module reloaded.');
-
-		this.emit('end');
-	});
-}
-
 function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {

 	const basePath = path.resolve(process.cwd(), out);

 	const neededFiles: { [file: string]: boolean; } = {};
 	monacodts.getFilesToWatch(out).forEach(function (filePath) {
 		filePath = path.normalize(filePath);
@@ -196,7 +163,7 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
 	}

 	resultStream = es.through(function (data) {
-		const filePath = path.normalize(data.path);
+		const filePath = path.normalize(path.resolve(basePath, data.relative));
 		if (neededFiles[filePath]) {
 			setInputFile(filePath, data.contents.toString());
 		}
File diff suppressed because it is too large
@@ -46,10 +46,6 @@
 		"name": "vs/workbench/parts/execution",
 		"project": "vscode-workbench"
 	},
-	{
-		"name": "vs/workbench/parts/explorers",
-		"project": "vscode-workbench"
-	},
 	{
 		"name": "vs/workbench/parts/extensions",
 		"project": "vscode-workbench"
@@ -71,7 +67,11 @@
 		"project": "vscode-workbench"
 	},
 	{
-		"name": "vs/workbench/parts/nps",
+		"name": "vs/workbench/parts/localizations",
+		"project": "vscode-workbench"
+	},
+	{
+		"name": "vs/workbench/parts/logs",
 		"project": "vscode-workbench"
 	},
 	{
@@ -138,6 +138,10 @@
 		"name": "vs/workbench/parts/welcome",
 		"project": "vscode-workbench"
 	},
+	{
+		"name": "vs/workbench/services/actions",
+		"project": "vscode-workbench"
+	},
 	{
 		"name": "vs/workbench/services/configuration",
 		"project": "vscode-workbench"
@@ -146,6 +150,10 @@
 		"name": "vs/workbench/services/crashReporter",
 		"project": "vscode-workbench"
 	},
+	{
+		"name": "vs/workbench/services/dialogs",
+		"project": "vscode-workbench"
+	},
 	{
 		"name": "vs/workbench/services/editor",
 		"project": "vscode-workbench"
@@ -154,6 +162,10 @@
 		"name": "vs/workbench/services/extensions",
 		"project": "vscode-workbench"
 	},
+	{
+		"name": "vs/workbench/services/jsonschemas",
+		"project": "vscode-workbench"
+	},
 	{
 		"name": "vs/workbench/services/files",
 		"project": "vscode-workbench"
@@ -162,10 +174,6 @@
 		"name": "vs/workbench/services/keybinding",
 		"project": "vscode-workbench"
 	},
-	{
-		"name": "vs/workbench/services/message",
-		"project": "vscode-workbench"
-	},
 	{
 		"name": "vs/workbench/services/mode",
 		"project": "vscode-workbench"
@@ -193,10 +201,6 @@
-	{
-		"name": "vs/workbench/services/decorations",
-		"project": "vscode-workbench"
-	},
 	{
 		"name": "setup_messages",
 		"project": "vscode-workbench"
 	}
 ]
}
1030	build/lib/i18n.ts
File diff suppressed because it is too large
build/lib/nls.js
@@ -79,7 +79,7 @@ function isImportNode(node) {
 function fileFrom(file, contents, path) {
     if (path === void 0) { path = file.path; }
     return new File({
-        contents: new Buffer(contents),
+        contents: Buffer.from(contents),
         base: file.base,
         cwd: file.cwd,
         path: path

build/lib/nls.ts
@@ -131,7 +131,7 @@ module nls {

 	export function fileFrom(file: File, contents: string, path: string = file.path) {
 		return new File({
-			contents: new Buffer(contents),
+			contents: Buffer.from(contents),
 			base: file.base,
 			cwd: file.cwd,
 			path: path
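This and several of the following hunks are one mechanical migration: `new Buffer(...)` is deprecated in Node.js because its behavior depends on the argument type (a string copies data, a number allocates uninitialized memory), which is a known security footgun; `Buffer.from(...)`, available since Node.js 5.10, is the explicit replacement. The two calls produce the same bytes for string input:

```ts
// Deprecated: the Buffer constructor is overloaded; new Buffer(number)
// would allocate uninitialized memory.
const legacy = new Buffer('hello', 'utf8');

// Preferred: explicit about intent and never returns uninitialized memory
// for string input. Use Buffer.alloc(n) when a zero-filled sized buffer is needed.
const safe = Buffer.from('hello', 'utf8');
```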
build/lib/optimize.js
@@ -59,7 +59,7 @@ function loader(bundledFileHeader, bundleLoader) {
         this.emit('data', new VinylFile({
             path: 'fake',
             base: '',
-            contents: new Buffer(bundledFileHeader)
+            contents: Buffer.from(bundledFileHeader)
         }));
         this.emit('data', data);
     }
@@ -98,7 +98,7 @@ function toConcatStream(bundledFileHeader, sources, dest) {
     return new VinylFile({
         path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
         base: base,
-        contents: new Buffer(source.contents)
+        contents: Buffer.from(source.contents)
     });
 });
 return es.readArray(treatedSources)
@@ -141,7 +141,7 @@ function optimizeTask(opts) {
     bundleInfoArray.push(new VinylFile({
         path: 'bundleInfo.json',
         base: '.',
-        contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
+        contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
     }));
 }
 es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -174,7 +174,6 @@ function optimizeTask(opts) {
     };
 }
 exports.optimizeTask = optimizeTask;
-;
 /**
  * Wrap around uglify and allow the preserveComments function
  * to have a file "context" to include our copyright only once per file.
@@ -237,4 +236,3 @@ function minifyTask(src, sourceMapBaseUrl) {
     };
 }
 exports.minifyTask = minifyTask;
-;
build/lib/optimize.ts
@@ -31,7 +31,7 @@ function log(prefix: string, message: string): void {
 }

 // {{SQL CARBON EDIT}}
-export function loaderConfig(emptyPaths: string[]) {
+export function loaderConfig(emptyPaths?: string[]) {
 	const result = {
 		paths: {
 			'vs': 'out-build/vs',
@@ -73,7 +73,7 @@ function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
 			this.emit('data', new VinylFile({
 				path: 'fake',
 				base: '',
-				contents: new Buffer(bundledFileHeader)
+				contents: Buffer.from(bundledFileHeader)
 			}));
 			this.emit('data', data);
 		} else {
@@ -117,7 +117,7 @@ function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
 		return new VinylFile({
 			path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
 			base: base,
-			contents: new Buffer(source.contents)
+			contents: Buffer.from(source.contents)
 		});
 	});
@@ -165,7 +165,7 @@ export interface IOptimizeTaskOpts {
 	/**
 	 * (languages to process)
 	 */
-	languages: string[];
+	languages: i18n.Language[];
 }
 export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
 	const entryPoints = opts.entryPoints;
@@ -201,7 +201,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
 		bundleInfoArray.push(new VinylFile({
 			path: 'bundleInfo.json',
 			base: '.',
-			contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
+			contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
 		}));
 	}
 	es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -241,7 +241,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
 			}))
 			.pipe(gulp.dest(out));
 	};
-};
+}

 declare class FileWithCopyright extends VinylFile {
 	public __hasOurCopyright: boolean;
@@ -295,7 +295,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
 	return es.duplex(input, output);
 }

-export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
+export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
 	const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);

 	return cb => {
@@ -326,4 +326,4 @@ export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
 		cb(err);
 	});
 	};
-};
+}
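The `sourceMapBaseUrl?` change above works through short-circuiting: when the argument is omitted, `sourceMappingURL` evaluates to `undefined`, and downstream code can skip appending a `//# sourceMappingURL=` comment. A condensed, self-contained sketch of the idiom (the function name and URL are illustrative):

```ts
// When base is undefined the whole expression is undefined, which callers
// treat as "do not emit a sourceMappingURL comment".
function sourceMappingUrlFor(base?: string): ((relative: string) => string) | undefined {
	return base ? (relative => `${base}/${relative}.map`) : undefined;
}

const withBase = sourceMappingUrlFor('https://example.com/sourcemaps');
console.log(withBase ? withBase('core/main.js') : '(no sourcemap URL)');
console.log(sourceMappingUrlFor()); // undefined
```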
9	build/lib/typings/event-stream.d.ts	vendored
@@ -1,7 +1,14 @@
 declare module "event-stream" {
 	import { Stream } from 'stream';
-	import { ThroughStream } from 'through';
+	import { ThroughStream as _ThroughStream } from 'through';
 	import { MapStream } from 'map-stream';
 	import * as File from 'vinyl';

+	export interface ThroughStream extends _ThroughStream {
+		queue(data: File | null);
+		push(data: File | null);
+		paused: boolean;
+	}
+
 	function merge(streams: Stream[]): ThroughStream;
+	function merge(...streams: Stream[]): ThroughStream;
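The widened typing above lets build code call `queue` and `push` on a through stream with vinyl `File` objects, and with `null` to signal end-of-stream, without casts. A small sketch of the call pattern this typing enables (the pass-through transform itself is illustrative):

```ts
import * as es from 'event-stream';
import * as File from 'vinyl';

// queue(file) emits a file downstream; queue(null) ends the stream,
// matching the usage seen throughout asar.ts above.
function passThrough(): NodeJS.ReadWriteStream {
	return es.through(function (file: File) {
		this.queue(file);  // forward each file unchanged
	}, function () {
		this.queue(null);  // flush callback: end the stream
	});
}
```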
build/lib/util.js
@@ -143,7 +143,7 @@ function loadSourcemaps() {
         cb(null, f);
         return;
     }
-    f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
+    f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
     fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
         if (err) {
             return cb(err);
@@ -160,7 +160,7 @@ function stripSourceMappingURL() {
     var output = input
         .pipe(es.mapSync(function (f) {
         var contents = f.contents.toString('utf8');
-        f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
+        f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
         return f;
     }));
     return es.duplex(input, output);
@@ -173,7 +173,6 @@ function rimraf(dir) {
     if (!err) {
         return cb();
     }
-    ;
     if (err.code === 'ENOTEMPTY' && ++retries < 5) {
         return setTimeout(function () { return retry(cb); }, 10);
     }
build/lib/util.ts
@@ -28,7 +28,7 @@ export interface IStreamProvider {
 	(cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream;
 }

-export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation: boolean): NodeJS.ReadWriteStream {
+export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation?: boolean): NodeJS.ReadWriteStream {
 	const input = es.through();
 	const output = es.through();
 	let state = 'idle';
@@ -129,7 +129,7 @@ export function skipDirectories(): NodeJS.ReadWriteStream {
 	});
 }

-export function cleanNodeModule(name: string, excludes: string[], includes: string[]): NodeJS.ReadWriteStream {
+export function cleanNodeModule(name: string, excludes: string[], includes?: string[]): NodeJS.ReadWriteStream {
 	const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : '');
 	const negate = (str: string) => '!' + str;
@@ -190,7 +190,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
 			return;
 		}

-		f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
+		f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');

 		fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
 			if (err) { return cb(err); }
@@ -209,7 +209,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
 	const output = input
 		.pipe(es.mapSync<VinylFile, VinylFile>(f => {
 			const contents = (<Buffer>f.contents).toString('utf8');
-			f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
+			f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
 			return f;
 		}));
@@ -223,7 +223,7 @@ export function rimraf(dir: string): (cb: any) => void {
 	_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
 		if (!err) {
 			return cb();
-		};
+		}

 		if (err.code === 'ENOTEMPTY' && ++retries < 5) {
 			return setTimeout(() => retry(cb), 10);
build/lib/watch/index.js
@@ -9,7 +9,7 @@ const es = require('event-stream');
 function handleDeletions() {
 	return es.mapSync(f => {
 		if (/\.ts$/.test(f.relative) && !f.contents) {
-			f.contents = new Buffer('');
+			f.contents = Buffer.from('');
 			f.stat = { mtime: new Date() };
 		}
build/lib/watch/watch-nsfw.js
@@ -30,12 +30,12 @@ function watch(root) {
 			path: path,
 			base: root
 		});

 		//@ts-ignore
 		file.event = type;
 		result.emit('data', file);
 	}

-	nsfw(root, function(events) {
+	nsfw(root, function (events) {
 		for (var i = 0; i < events.length; i++) {
 			var e = events[i];
 			var changeType = e.action;
@@ -47,16 +47,16 @@ function watch(root) {
 			handleEvent(path.join(e.directory, e.file), toChangeType(changeType));
 		}
 	}
-	}).then(function(watcher) {
+	}).then(function (watcher) {
 		watcher.start();
 	});
 });

-	return result;
+	return result;
 }

 var cache = Object.create(null);

-module.exports = function(pattern, options) {
+module.exports = function (pattern, options) {
 	options = options || {};

 	var cwd = path.normalize(options.cwd || process.cwd());
@@ -66,7 +66,7 @@ module.exports = function(pattern, options) {
 		watcher = cache[cwd] = watch(cwd);
 	}

-	var rebase = !options.base ? es.through() : es.mapSync(function(f) {
+	var rebase = !options.base ? es.through() : es.mapSync(function (f) {
 		f.base = options.base;
 		return f;
 	});
@@ -74,13 +74,13 @@ module.exports = function(pattern, options) {
 	return watcher
 		.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
 		.pipe(filter(pattern))
-		.pipe(es.map(function(file, cb) {
-			fs.stat(file.path, function(err, stat) {
+		.pipe(es.map(function (file, cb) {
+			fs.stat(file.path, function (err, stat) {
 				if (err && err.code === 'ENOENT') { return cb(null, file); }
 				if (err) { return cb(); }
 				if (!stat.isFile()) { return cb(); }

-				fs.readFile(file.path, function(err, contents) {
+				fs.readFile(file.path, function (err, contents) {
 					if (err && err.code === 'ENOENT') { return cb(null, file); }
 					if (err) { return cb(); }
build/lib/watch/watch-win32.js
@@ -24,7 +24,8 @@ function watch(root) {
 	var result = es.through();
 	var child = cp.spawn(watcherPath, [root]);

-	child.stdout.on('data', function(data) {
+	child.stdout.on('data', function (data) {
+		// @ts-ignore
 		var lines = data.toString('utf8').split('\n');
 		for (var i = 0; i < lines.length; i++) {
 			var line = lines[i].trim();
@@ -46,17 +47,17 @@ function watch(root) {
 				path: changePathFull,
 				base: root
 			});

 			//@ts-ignore
 			file.event = toChangeType(changeType);
 			result.emit('data', file);
 		}
 	});

-	child.stderr.on('data', function(data) {
+	child.stderr.on('data', function (data) {
 		result.emit('error', data);
 	});

-	child.on('exit', function(code) {
+	child.on('exit', function (code) {
 		result.emit('error', 'Watcher died with code ' + code);
 		child = null;
 	});
@@ -70,7 +71,7 @@ function watch(root) {

 var cache = Object.create(null);

-module.exports = function(pattern, options) {
+module.exports = function (pattern, options) {
 	options = options || {};

 	var cwd = path.normalize(options.cwd || process.cwd());