Merge from vscode merge-base (#22769)

* Merge from vscode merge-base

* Turn off basic checks

* Enable compilation, unit, and integration tests
Author: Lewis Sanchez
Date: 2023-04-18 18:28:58 -07:00
Committed by: GitHub
Parent: 6186358001
Commit: 6bd0a17d3c

2389 changed files with 92183 additions and 42601 deletions

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.createAsar = void 0;
const path = require("path");
@@ -81,7 +81,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents);
}
}, function () {
let finish = () => {
const finish = () => {
{
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as es from 'event-stream';
const pickle = require('chromium-pickle-js');
@@ -98,7 +96,7 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
}
}, function () {
let finish = () => {
const finish = () => {
{
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));

View File

@@ -98,12 +98,12 @@ function writeControlFile(control) {
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
}
function getBuiltInExtensions() {
log('Syncronizing built-in extensions...');
log('Synchronizing built-in extensions...');
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
const control = readControlFile();
const streams = [];
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
let controlState = control[extension.name] || 'marketplace';
const controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;
streams.push(syncExtension(extension, controlState));
}

View File

@@ -136,14 +136,14 @@ function writeControlFile(control: IControlFile): void {
}
export function getBuiltInExtensions(): Promise<void> {
log('Syncronizing built-in extensions...');
log('Synchronizing built-in extensions...');
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
const control = readControlFile();
const streams: Stream[] = [];
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
let controlState = control[extension.name] || 'marketplace';
const controlState = control[extension.name] || 'marketplace';
control[extension.name] = controlState;
streams.push(syncExtension(extension, controlState));

View File

@@ -43,14 +43,20 @@ function bundle(entryPoints, config, callback) {
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
config.buildForceInvokeFactory = config.buildForceInvokeFactory || {};
config.buildForceInvokeFactory['vs/nls'] = true;
config.buildForceInvokeFactory['vs/css'] = true;
loader.config(config);
loader(['require'], (localRequire) => {
const resolvePath = (path) => {
const r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
const resolvePath = (entry) => {
let r = localRequire.toUrl(entry.path);
if (!r.endsWith('.js')) {
r += '.js';
}
return r;
// avoid packaging the build version of plugins:
r = r.replace('vs/nls.build.js', 'vs/nls.js');
r = r.replace('vs/css.build.js', 'vs/css.js');
return { path: r, amdModuleId: entry.amdModuleId };
};
for (const moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId];
@@ -299,9 +305,18 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
}
else {
else if (module.defineLocation) {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
}
else {
const moduleCopy = {
id: module.id,
path: module.path,
defineLocation: module.defineLocation,
dependencies: module.dependencies
};
throw new Error(`Cannot bundle module '${module.id}' for entry point '${entryPoint}' because it has no shim and it lacks a defineLocation: ${JSON.stringify(moduleCopy)}`);
}
});
Object.keys(usedPlugins).forEach((pluginName) => {
const plugin = usedPlugins[pluginName];
@@ -322,10 +337,13 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
plugin.writeFile(pluginName, entryPoint, req, write, {});
}
});
const toIFile = (path) => {
const contents = readFileAndRemoveBOM(path);
const toIFile = (entry) => {
let contents = readFileAndRemoveBOM(entry.path);
if (entry.amdModuleId) {
contents = contents.replace(/^define\(/m, `define("${entry.amdModuleId}",`);
}
return {
path: path,
path: entry.path,
contents: contents
};
};

View File

@@ -15,7 +15,7 @@ interface IPosition {
interface IBuildModuleInfo {
id: string;
path: string;
defineLocation: IPosition;
defineLocation: IPosition | null;
dependencies: string[];
shim: string;
exports: any;
@@ -42,12 +42,17 @@ interface ILoaderPluginReqFunc {
toUrl(something: string): string;
}
export interface IExtraFile {
path: string;
amdModuleId?: string;
}
export interface IEntryPoint {
name: string;
include?: string[];
exclude?: string[];
prepend?: string[];
append?: string[];
prepend?: IExtraFile[];
append?: IExtraFile[];
dest?: string;
}
@@ -92,6 +97,13 @@ interface IPartialBundleResult {
export interface ILoaderConfig {
isBuild?: boolean;
paths?: { [path: string]: any };
/*
* Normally, during a build, no module factories are invoked. This can be used
* to forcefully execute a module's factory.
*/
buildForceInvokeFactory: {
[moduleId: string]: boolean;
};
}
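
The comment block above documents the new buildForceInvokeFactory option. A minimal sketch of a loader config that opts an additional module's factory into execution during a build (the module id and import path are illustrative assumptions, not taken from this commit; bundle() itself already forces 'vs/nls' and 'vs/css'):

import { ILoaderConfig } from './bundle'; // path assumed

const config: ILoaderConfig = {
    isBuild: true,
    paths: {},
    // Hypothetical: run this module's factory even though this is a build.
    buildForceInvokeFactory: {
        'vs/some/module': true
    }
};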
/**
@@ -132,15 +144,21 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
config.buildForceInvokeFactory = config.buildForceInvokeFactory || {};
config.buildForceInvokeFactory['vs/nls'] = true;
config.buildForceInvokeFactory['vs/css'] = true;
loader.config(config);
loader(['require'], (localRequire: any) => {
const resolvePath = (path: string) => {
const r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
const resolvePath = (entry: IExtraFile) => {
let r = localRequire.toUrl(entry.path);
if (!r.endsWith('.js')) {
r += '.js';
}
return r;
// avoid packaging the build version of plugins:
r = r.replace('vs/nls.build.js', 'vs/nls.js');
r = r.replace('vs/css.build.js', 'vs/css.js');
return { path: r, amdModuleId: entry.amdModuleId };
};
for (const moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId];
@@ -403,8 +421,8 @@ function emitEntryPoint(
deps: IGraph,
entryPoint: string,
includedModules: string[],
prepend: string[],
append: string[],
prepend: IExtraFile[],
append: IExtraFile[],
dest: string | undefined
): IEmitEntryPointResult {
if (!dest) {
@@ -444,8 +462,16 @@ function emitEntryPoint(
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
} else {
} else if (module.defineLocation) {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
} else {
const moduleCopy = {
id: module.id,
path: module.path,
defineLocation: module.defineLocation,
dependencies: module.dependencies
};
throw new Error(`Cannot bundle module '${module.id}' for entry point '${entryPoint}' because it has no shim and it lacks a defineLocation: ${JSON.stringify(moduleCopy)}`);
}
});
@@ -470,10 +496,13 @@ function emitEntryPoint(
}
});
const toIFile = (path: string): IFile => {
const contents = readFileAndRemoveBOM(path);
const toIFile = (entry: IExtraFile): IFile => {
let contents = readFileAndRemoveBOM(entry.path);
if (entry.amdModuleId) {
contents = contents.replace(/^define\(/m, `define("${entry.amdModuleId}",`);
}
return {
path: path,
path: entry.path,
contents: contents
};
};
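
For context, a small sketch of what the amdModuleId rewrite in toIFile does to an anonymous define in a prepended file (the entry and file contents below are invented for illustration):

const entry = { path: 'out-build/vs/nls.js', amdModuleId: 'vs/nls' }; // hypothetical IExtraFile
let contents = 'define([], function () { return {}; });';
if (entry.amdModuleId) {
    contents = contents.replace(/^define\(/m, `define("${entry.amdModuleId}",`);
}
// contents is now: define("vs/nls",[], function () { return {}; });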

View File

@@ -1,10 +1,10 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.watchApiProposalNamesTask = exports.compileApiProposalNamesTask = exports.watchTask = exports.compileTask = void 0;
exports.watchApiProposalNamesTask = exports.compileApiProposalNamesTask = exports.watchTask = exports.compileTask = exports.transpileTask = void 0;
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
@@ -22,7 +22,7 @@ const watch = require('./watch');
const reporter = (0, reporter_1.createReporter)();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
const options = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -34,8 +34,8 @@ function getTypeScriptCompilerOptions(src) {
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
return options;
}
function createCompile(src, build, emitError) {
const tsb = require('gulp-tsb');
function createCompile(src, build, emitError, transpileOnly) {
const tsb = require('./tsb');
const sourcemaps = require('gulp-sourcemaps');
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
@@ -69,7 +69,7 @@ function createCompile(src, build, emitError) {
.pipe(noDeclarationsFilter)
.pipe(build ? nls.nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
.pipe(transpileOnly ? es.through() : sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot
@@ -83,14 +83,24 @@ function createCompile(src, build, emitError) {
};
return pipeline;
}
function transpileTask(src, out) {
return function () {
const transpile = createCompile(src, false, true, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
return srcPipe
.pipe(transpile())
.pipe(gulp.dest(out));
};
}
exports.transpileTask = transpileTask;
function compileTask(src, out, build) {
return function () {
if (os.totalmem() < 4000000000) {
throw new Error('compilation requires 4GB of RAM');
}
const compile = createCompile(src, build, true);
const compile = createCompile(src, build, true, false);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
const generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
@@ -103,10 +113,10 @@ function compileTask(src, out, build) {
exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const compile = createCompile('src', build, false, false);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
const generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
.pipe(generator.stream)
@@ -122,7 +132,7 @@ class MonacoGenerator {
this._isWatch = isWatch;
this.stream = es.through();
this._watchedFiles = {};
let onWillReadFile = (moduleId, filePath) => {
const onWillReadFile = (moduleId, filePath) => {
if (!this._isWatch) {
return;
}
@@ -159,7 +169,7 @@ class MonacoGenerator {
}, 20);
}
_run() {
let r = monacodts.run3(this._declarationResolver);
const r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
@@ -188,6 +198,15 @@ class MonacoGenerator {
}
}
function generateApiProposalNames() {
let eol;
try {
const src = fs.readFileSync('src/vs/workbench/services/extensions/common/extensionsApiProposals.ts', 'utf-8');
const match = /\r?\n/m.exec(src);
eol = match ? match[0] : os.EOL;
}
catch {
eol = os.EOL;
}
const pattern = /vscode\.proposed\.([a-zA-Z]+)\.d\.ts$/;
const proposalNames = new Set();
const input = es.through();
@@ -214,7 +233,7 @@ function generateApiProposalNames() {
'});',
'export type ApiProposalName = keyof typeof allApiProposals;',
'',
].join(os.EOL);
].join(eol);
this.emit('data', new File({
path: 'vs/workbench/services/extensions/common/extensionsApiProposals.ts',
contents: Buffer.from(contents)

View File

@@ -26,7 +26,7 @@ const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
const rootDir = path.join(__dirname, `../../${src}`);
let options: ts.CompilerOptions = {};
const options: ts.CompilerOptions = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -39,8 +39,8 @@ function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
return options;
}
function createCompile(src: string, build: boolean, emitError?: boolean) {
const tsb = require('gulp-tsb') as typeof import('gulp-tsb');
function createCompile(src: string, build: boolean, emitError: boolean, transpileOnly: boolean) {
const tsb = require('./tsb') as typeof import('./tsb');
const sourcemaps = require('gulp-sourcemaps') as typeof import('gulp-sourcemaps');
@@ -80,7 +80,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
.pipe(noDeclarationsFilter)
.pipe(build ? nls.nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
.pipe(transpileOnly ? es.through() : sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: overrideOptions.sourceRoot
@@ -96,6 +96,19 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
return pipeline;
}
export function transpileTask(src: string, out: string): () => NodeJS.ReadWriteStream {
return function () {
const transpile = createCompile(src, false, true, true);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
return srcPipe
.pipe(transpile())
.pipe(gulp.dest(out));
};
}
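
The new transpileTask can be registered like any other gulp task factory; a minimal sketch, with the task name, import path, and output folder assumed for illustration:

import * as gulp from 'gulp';
import { transpileTask } from './lib/compile'; // path assumed

// Transpile-only build: no sourcemaps are written by this pipeline (transpileOnly = true).
gulp.task('transpile-client', transpileTask('src', 'out'));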
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
@@ -104,9 +117,9 @@ export function compileTask(src: string, out: string, build: boolean): () => Nod
throw new Error('compilation requires 4GB of RAM');
}
const compile = createCompile(src, build, true);
const compile = createCompile(src, build, true, false);
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
const generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
@@ -121,12 +134,12 @@ export function compileTask(src: string, out: string, build: boolean): () => Nod
export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile('src', build);
const compile = createCompile('src', build, false, false);
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
const generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
@@ -150,7 +163,7 @@ class MonacoGenerator {
this._isWatch = isWatch;
this.stream = es.through();
this._watchedFiles = {};
let onWillReadFile = (moduleId: string, filePath: string) => {
const onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
return;
}
@@ -192,7 +205,7 @@ class MonacoGenerator {
}
private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
const r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
@@ -225,6 +238,16 @@ class MonacoGenerator {
}
function generateApiProposalNames() {
let eol: string;
try {
const src = fs.readFileSync('src/vs/workbench/services/extensions/common/extensionsApiProposals.ts', 'utf-8');
const match = /\r?\n/m.exec(src);
eol = match ? match[0] : os.EOL;
} catch {
eol = os.EOL;
}
const pattern = /vscode\.proposed\.([a-zA-Z]+)\.d\.ts$/;
const proposalNames = new Set<string>();
@@ -253,7 +276,7 @@ function generateApiProposalNames() {
'});',
'export type ApiProposalName = keyof typeof allApiProposals;',
'',
].join(os.EOL);
].join(eol);
this.emit('data', new File({
path: 'vs/workbench/services/extensions/common/extensionsApiProposals.ts',

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.getProductionDependencies = void 0;
const path = require("path");

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as cp from 'child_process';
import * as _ from 'underscore';

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.config = void 0;
const fs = require("fs");

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
//------------------------------------------------------------------------------
// Rule Definition

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as eslint from 'eslint';
import { TSESTree } from '@typescript-eslint/experimental-utils';
import * as ESTree from 'estree';

View File

@@ -41,7 +41,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
key = keyNode.value;
}
else if (keyNode.type === experimental_utils_1.AST_NODE_TYPES.ObjectExpression) {
for (let property of keyNode.properties) {
for (const property of keyNode.properties) {
if (property.type === experimental_utils_1.AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
@@ -83,7 +83,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
// (2)
// report all invalid NLS keys
if (!key.match(NoUnexternalizedStrings._rNlsKeys)) {
for (let value of values) {
for (const value of values) {
context.report({ loc: value.call.loc, messageId: 'badKey', data: { key } });
}
}

View File

@@ -51,7 +51,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
key = keyNode.value;
} else if (keyNode.type === AST_NODE_TYPES.ObjectExpression) {
for (let property of keyNode.properties) {
for (const property of keyNode.properties) {
if (property.type === AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
@@ -97,7 +97,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
// (2)
// report all invalid NLS keys
if (!key.match(NoUnexternalizedStrings._rNlsKeys)) {
for (let value of values) {
for (const value of values) {
context.report({ loc: value.call.loc, messageId: 'badKey', data: { key } });
}
}

View File

@@ -1,14 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// FORKED FROM https://github.com/eslint/eslint/blob/b23ad0d789a909baf8d7c41a35bc53df932eaf30/lib/rules/no-unused-expressions.js
// and added support for `OptionalCallExpression`, see https://github.com/facebook/create-react-app/issues/8107 and https://github.com/eslint/eslint/issues/12642
/**
* @fileoverview Flag expressions in statement position that do not side effect
* @author Michael Ficarra
*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
//------------------------------------------------------------------------------
// Rule Definition

View File

@@ -11,8 +11,6 @@
* @author Michael Ficarra
*/
'use strict';
import * as eslint from 'eslint';
import { TSESTree } from '@typescript-eslint/experimental-utils';
import * as ESTree from 'estree';

View File

@@ -16,7 +16,7 @@ module.exports = new class ApiProviderNaming {
return {
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node) => {
let found = false;
for (let param of node.params) {
for (const param of node.params) {
if (param.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
found = found || param.name === 'token';
}

View File

@@ -20,7 +20,7 @@ export = new class ApiProviderNaming implements eslint.Rule.RuleModule {
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature[key.name=/^(provide|resolve).+/]']: (node: any) => {
let found = false;
for (let param of (<TSESTree.TSMethodSignature>node).params) {
for (const param of (<TSESTree.TSMethodSignature>node).params) {
if (param.type === AST_NODE_TYPES.Identifier) {
found = found || param.name === 'token';
}

View File

@@ -77,7 +77,7 @@ module.exports = new (_a = class ApiEventNaming {
if (def.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
return def;
}
else if ((def.type === experimental_utils_1.AST_NODE_TYPES.TSPropertySignature || def.type === experimental_utils_1.AST_NODE_TYPES.Property) && def.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
else if ((def.type === experimental_utils_1.AST_NODE_TYPES.TSPropertySignature || def.type === experimental_utils_1.AST_NODE_TYPES.PropertyDefinition) && def.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
return def.key;
}
return this.getIdent(def.parent);

View File

@@ -88,11 +88,10 @@ export = new class ApiEventNaming implements eslint.Rule.RuleModule {
if (def.type === AST_NODE_TYPES.Identifier) {
return def;
} else if ((def.type === AST_NODE_TYPES.TSPropertySignature || def.type === AST_NODE_TYPES.Property) && def.key.type === AST_NODE_TYPES.Identifier) {
} else if ((def.type === AST_NODE_TYPES.TSPropertySignature || def.type === AST_NODE_TYPES.PropertyDefinition) && def.key.type === AST_NODE_TYPES.Identifier) {
return def.key;
}
return this.getIdent(def.parent);
}
};

View File

@@ -34,14 +34,14 @@ function minifyExtensionResources(input) {
.pipe(jsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
const errors = [];
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
if (errors.length === 0) {
// file parsed OK => just stringify to drop whitespace and comments
f.contents = Buffer.from(JSON.stringify(value));
}
return f;
}))
const errors = [];
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
if (errors.length === 0) {
// file parsed OK => just stringify to drop whitespace and comments
f.contents = Buffer.from(JSON.stringify(value));
}
return f;
}))
.pipe(jsonFilter.restore);
}
function updateExtensionPackageJSON(input, update) {
@@ -50,10 +50,10 @@ function updateExtensionPackageJSON(input, update) {
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
const data = JSON.parse(f.contents.toString('utf8'));
f.contents = Buffer.from(JSON.stringify(update(data)));
return f;
}))
const data = JSON.parse(f.contents.toString('utf8'));
f.contents = Buffer.from(JSON.stringify(update(data)));
return f;
}))
.pipe(packageJsonFilter.restore);
}
function fromLocal(extensionPath, forWeb) {
@@ -95,11 +95,11 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
// check for a webpack configuration files, then invoke webpack
// and merge its output with the files stream.
const webpackConfigLocations = glob.sync(path.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] });
@@ -123,20 +123,20 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
});
});
es.merge(...webpackStreams, es.readArray(files))
@@ -158,16 +158,16 @@ function fromLocalNormal(extensionPath) {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
}
@@ -324,11 +324,11 @@ function isWebExtension(manifest) {
function packageLocalExtensionsStream(forWeb) {
const localExtensionsDescriptions = (glob.sync('extensions/*/package.json')
.map(manifestPath => {
const absoluteManifestPath = path.join(root, manifestPath);
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
const absoluteManifestPath = path.join(root, manifestPath);
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package
@@ -359,15 +359,15 @@ function packageMarketplaceExtensionsStream(forWeb, galleryServiceUrl) {
];
const marketplaceExtensionsStream = minifyExtensionResources(es.merge(...marketplaceExtensionsDescriptions
.map(extension => {
const input = (galleryServiceUrl ? fromMarketplace(galleryServiceUrl, extension) : fromGithub(extension))
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data) => {
delete data.scripts;
delete data.dependencies;
delete data.devDependencies;
return data;
});
})));
const input = (galleryServiceUrl ? fromMarketplace(galleryServiceUrl, extension) : fromGithub(extension))
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data) => {
delete data.scripts;
delete data.dependencies;
delete data.devDependencies;
return data;
});
})));
return (marketplaceExtensionsStream
.pipe(util2.setExecutableBit(['**/*.sh'])));
}
@@ -384,7 +384,7 @@ function scanBuiltinExtensions(extensionsRoot, exclude = []) {
if (!fs.existsSync(packageJSONPath)) {
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
if (!isWebExtension(packageJSON)) {
continue;
}
@@ -412,10 +412,10 @@ exports.scanBuiltinExtensions = scanBuiltinExtensions;
function packageExternalExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => externalExtensions.indexOf(name) >= 0 || exports.vscodeExternalExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path, false)
@@ -433,10 +433,10 @@ exports.cleanRebuildExtensions = cleanRebuildExtensions;
function packageRebuildExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path, false)
@@ -450,7 +450,7 @@ function translatePackageJSON(packageJSON, packageNLSPath) {
const CharCode_PC = '%'.charCodeAt(0);
const packageNls = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj) => {
for (let key in obj) {
for (const key in obj) {
const val = obj[key];
if (Array.isArray(val)) {
val.forEach(translate);
@@ -477,6 +477,7 @@ const esbuildMediaScripts = [
'markdown-language-features/esbuild-preview.js',
'markdown-math/esbuild.js',
'notebook-renderers/esbuild.js',
'ipynb/esbuild.js',
'simple-browser/esbuild-preview.js',
];
async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {

View File

@@ -9,7 +9,7 @@ import * as cp from 'child_process';
import * as glob from 'glob';
import * as gulp from 'gulp';
import * as path from 'path';
import * as through2 from 'through2';
import * as through2 from 'through2'
import got from 'got';
import { Stream } from 'stream';
import * as File from 'vinyl';
@@ -285,6 +285,7 @@ const excludedExtensions = [
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
'vscode-custom-editor-tests',
'vscode-notebook-tests',
'integration-tests', // {{SQL CARBON EDIT}}
];
@@ -476,7 +477,7 @@ export function scanBuiltinExtensions(extensionsRoot: string, exclude: string[]
if (!fs.existsSync(packageJSONPath)) {
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString('utf8'));
if (!isWebExtension(packageJSON)) {
continue;
}
@@ -549,7 +550,7 @@ export function translatePackageJSON(packageJSON: string, packageNLSPath: string
const CharCode_PC = '%'.charCodeAt(0);
const packageNls: NLSFormat = JSON.parse(fs.readFileSync(packageNLSPath).toString());
const translate = (obj: any) => {
for (let key in obj) {
for (const key in obj) {
const val = obj[key];
if (Array.isArray(val)) {
val.forEach(translate);
@@ -575,6 +576,7 @@ const esbuildMediaScripts = [
'markdown-language-features/esbuild-preview.js',
'markdown-math/esbuild.js',
'notebook-renderers/esbuild.js',
'ipynb/esbuild.js',
'simple-browser/esbuild-preview.js',
];
@@ -588,7 +590,7 @@ export async function webpackExtensions(taskName: string, isWatch: boolean, webp
function addConfig(configOrFn: webpack.Configuration | Function) {
let config;
if (typeof configOrFn === 'function') {
config = configOrFn({}, {});
config = (configOrFn as Function)({}, {});
webpackConfigs.push(config);
} else {
config = configOrFn;

View File

@@ -1,10 +1,10 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.getVersion = void 0;
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.getVersion = void 0;
const path = require("path");
const fs = require("fs");
/**
@@ -45,7 +45,7 @@ function getVersion(repo) {
}
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch;
let refs = {};
const refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}

View File

@@ -2,8 +2,6 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as fs from 'fs';
@@ -51,7 +49,7 @@ export function getVersion(repo: string): string | undefined {
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch: RegExpExecArray | null;
let refs: { [ref: string]: string } = {};
const refs: { [ref: string]: string } = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];

View File

@@ -46,7 +46,7 @@ exports.externalExtensionsWithTranslations = {
var LocalizeInfo;
(function (LocalizeInfo) {
function is(value) {
let candidate = value;
const candidate = value;
return Is.defined(candidate) && Is.string(candidate.key) && (Is.undef(candidate.comment) || (Is.array(candidate.comment) && candidate.comment.every(element => Is.string(element))));
}
LocalizeInfo.is = is;
@@ -57,8 +57,8 @@ var BundledFormat;
if (Is.undef(value)) {
return false;
}
let candidate = value;
let length = Object.keys(value).length;
const candidate = value;
const length = Object.keys(value).length;
return length === 3 && Is.defined(candidate.keys) && Is.defined(candidate.messages) && Is.defined(candidate.bundles);
}
BundledFormat.is = is;
@@ -70,7 +70,7 @@ var PackageJsonFormat;
return false;
}
return Object.keys(value).every(key => {
let element = value[key];
const element = value[key];
return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
});
}
@@ -133,9 +133,9 @@ class XLF {
}
this.numberOfMessages += keys.length;
this.files[original] = [];
let existingKeys = new Set();
const existingKeys = new Set();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
const key = keys[i];
let realKey;
let comment;
if (Is.string(key)) {
@@ -152,7 +152,7 @@ class XLF {
continue;
}
existingKeys.add(realKey);
let message = encodeEntities(messages[i]);
const message = encodeEntities(messages[i]);
this.files[original].push({ id: realKey, message: message, comment: comment });
}
}
@@ -178,7 +178,7 @@ class XLF {
this.appendNewLine('</xliff>', 0);
}
appendNewLine(content, indent) {
let line = new Line(indent);
const line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
}
@@ -186,8 +186,8 @@ class XLF {
exports.XLF = XLF;
XLF.parsePseudo = function (xlfString) {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
let files = [];
const parser = new xml2js.Parser();
const files = [];
parser.parseString(xlfString, function (_err, result) {
const fileNodes = result['xliff']['file'];
fileNodes.forEach(file => {
@@ -211,8 +211,8 @@ XLF.parsePseudo = function (xlfString) {
};
XLF.parse = function (xlfString) {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
let files = [];
const parser = new xml2js.Parser();
const files = [];
parser.parseString(xlfString, function (err, result) {
if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
@@ -226,7 +226,7 @@ XLF.parse = function (xlfString) {
if (!originalFilePath) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
}
let language = file.$['target-language'];
const language = file.$['target-language'];
if (!language) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
}
@@ -295,9 +295,10 @@ function stripComments(content) {
// Second group matches a single quoted string
// Third group matches a multi line comment
// Forth group matches a single line comment
const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
// Only one of m1, m2, m3, m4 matches
// Fifth group matches a trailing comma
const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))|(,\s*[}\]])/g;
const result = content.replace(regexp, (match, _m1, _m2, m3, m4, m5) => {
// Only one of m1, m2, m3, m4, m5 matches
if (m3) {
// A block comment. Replace with nothing
return '';
@@ -313,6 +314,10 @@ function stripComments(content) {
return '';
}
}
else if (m5) {
// Remove the trailing comma
return match.substring(1);
}
else {
// We match a string
return match;
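
A standalone sketch of the stripComments change above, showing how the added fifth capture group drops a trailing comma while keeping the whitespace and closing bracket (the input string is invented; the real helper also preserves line breaks for line comments):

const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))|(,\s*[}\]])/g;
const input = '{ "a": 1, "b": 2, }';
const output = input.replace(regexp, (match, _m1, _m2, m3, m4, m5) => {
    if (m3 || m4) { return ''; }            // comments are dropped (simplified)
    if (m5) { return match.substring(1); }  // ", }" becomes " }"
    return match;                           // string literals are kept verbatim
});
// output === '{ "a": 1, "b": 2 }'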
@@ -356,20 +361,20 @@ function escapeCharacters(value) {
return result.join('');
}
function processCoreBundleFormat(fileHeader, languages, json, emitter) {
let keysSection = json.keys;
let messageSection = json.messages;
let bundleSection = json.bundles;
let statistics = Object.create(null);
let defaultMessages = Object.create(null);
let modules = Object.keys(keysSection);
const keysSection = json.keys;
const messageSection = json.messages;
const bundleSection = json.bundles;
const statistics = Object.create(null);
const defaultMessages = Object.create(null);
const modules = Object.keys(keysSection);
modules.forEach((module) => {
let keys = keysSection[module];
let messages = messageSection[module];
const keys = keysSection[module];
const messages = messageSection[module];
if (!messages || keys.length !== messages.length) {
emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
return;
}
let messageMap = Object.create(null);
const messageMap = Object.create(null);
defaultMessages[module] = messageMap;
keys.map((key, i) => {
if (typeof key === 'string') {
@@ -380,27 +385,27 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
}
});
});
let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
const languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
if (!fs.existsSync(languageDirectory)) {
log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
}
let sortedLanguages = sortLanguages(languages);
const sortedLanguages = sortLanguages(languages);
sortedLanguages.forEach((language) => {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`Generating nls bundles for: ${language.id}`);
}
statistics[language.id] = 0;
let localizedModules = Object.create(null);
let languageFolderName = language.translationId || language.id;
let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
const localizedModules = Object.create(null);
const languageFolderName = language.translationId || language.id;
const i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
let allMessages;
if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
const content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
allMessages = JSON.parse(content);
}
modules.forEach((module) => {
let order = keysSection[module];
const order = keysSection[module];
let moduleMessage;
if (allMessages) {
moduleMessage = allMessages.contents[module];
@@ -412,7 +417,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
moduleMessage = defaultMessages[module];
statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
}
let localizedMessages = [];
const localizedMessages = [];
order.forEach((keyInfo) => {
let key = null;
if (typeof keyInfo === 'string') {
@@ -434,14 +439,14 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
localizedModules[module] = localizedMessages;
});
Object.keys(bundleSection).forEach((bundle) => {
let modules = bundleSection[bundle];
let contents = [
const modules = bundleSection[bundle];
const contents = [
fileHeader,
`define("${bundle}.nls.${language.id}", {`
];
modules.forEach((module, index) => {
contents.push(`\t"${module}": [`);
let messages = localizedModules[module];
const messages = localizedModules[module];
if (!messages) {
emitter.emit('error', `Didn't find messages for module ${module}.`);
return;
@@ -456,11 +461,11 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
});
});
Object.keys(statistics).forEach(key => {
let value = statistics[key];
const value = statistics[key];
log(`${key} has ${value} untranslated strings.`);
});
sortedLanguages.forEach(language => {
let stats = statistics[language.id];
const stats = statistics[language.id];
if (Is.undef(stats)) {
log(`\tNo translations found for language ${language.id}. Using default language instead.`);
}
@@ -468,7 +473,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
}
function processNlsFiles(opts) {
return (0, event_stream_1.through)(function (file) {
let fileName = path.basename(file.path);
const fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
if (file.isBuffer()) {
@@ -554,7 +559,7 @@ function createXlfFilesForCoreBundle() {
xlf.addFile(`src/${coreModule}`, keys, messages);
}
}
for (let resource in xlfs) {
for (const resource in xlfs) {
const xlf = xlfs[resource];
const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
const xlfFile = new File({
@@ -586,7 +591,7 @@ function createXlfFilesForExtensions() {
if (!stat.isDirectory()) {
return;
}
let extensionName = path.basename(extensionFolder.path);
const extensionName = path.basename(extensionFolder.path);
if (extensionName === 'node_modules') {
return;
}
@@ -604,17 +609,24 @@ function createXlfFilesForExtensions() {
const basename = path.basename(file.path);
if (basename === 'package.nls.json') {
const json = JSON.parse(buffer.toString('utf8'));
const keys = Object.keys(json);
const messages = keys.map((key) => {
const keys = [];
const messages = [];
Object.keys(json).forEach((key) => {
const value = json[key];
if (Is.string(value)) {
return value;
keys.push(key);
messages.push(value);
}
else if (value) {
return value.message;
keys.push({
key,
comment: value.comment
});
messages.push(value.message);
}
else {
return `Unknown message for key: ${key}`;
keys.push(key);
messages.push(`Unknown message for key: ${key}`);
}
});
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
@@ -622,7 +634,7 @@ function createXlfFilesForExtensions() {
else if (basename === 'nls.metadata.json') {
const json = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
for (const file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
}
@@ -634,7 +646,7 @@ function createXlfFilesForExtensions() {
}
}, function () {
if (_xlf) {
let xlfFile = new File({
const xlfFile = new File({
path: path.join(extensionsProject, extensionName + '.xlf'),
contents: Buffer.from(_xlf.toString(), 'utf8')
});
@@ -666,14 +678,14 @@ function createXlfFilesForIsl() {
else {
throw new Error(`Unknown input file ${file.path}`);
}
let xlf = new XLF(projectName), keys = [], messages = [];
let model = new TextModel(file.contents.toString());
const xlf = new XLF(projectName), keys = [], messages = [];
const model = new TextModel(file.contents.toString());
let inMessageSection = false;
model.lines.forEach(line => {
if (line.length === 0) {
return;
}
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
switch (firstChar) {
case ';':
// Comment line;
@@ -685,13 +697,13 @@ function createXlfFilesForIsl() {
if (!inMessageSection) {
return;
}
let sections = line.split('=');
const sections = line.split('=');
if (sections.length !== 2) {
throw new Error(`Badly formatted message found: ${line}`);
}
else {
let key = sections[0];
let value = sections[1];
const key = sections[0];
const value = sections[1];
if (key.length > 0 && value.length > 0) {
keys.push(key);
messages.push(value);
@@ -708,8 +720,8 @@ function createXlfFilesForIsl() {
}
exports.createXlfFilesForIsl = createXlfFilesForIsl;
function pushXlfFiles(apiHostname, username, password) {
let tryGetPromises = [];
let updateCreatePromises = [];
const tryGetPromises = [];
const updateCreatePromises = [];
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
@@ -747,11 +759,11 @@ function getAllResources(project, apiHostname, username, password) {
method: 'GET'
};
const request = https.request(options, (res) => {
let buffer = [];
const buffer = [];
res.on('data', (chunk) => buffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
let json = JSON.parse(Buffer.concat(buffer).toString());
const json = JSON.parse(Buffer.concat(buffer).toString());
if (Array.isArray(json)) {
resolve(json.map(o => o.slug));
return;
@@ -770,7 +782,7 @@ function getAllResources(project, apiHostname, username, password) {
});
}
function findObsoleteResources(apiHostname, username, password) {
let resourcesByProject = Object.create(null);
const resourcesByProject = Object.create(null);
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
@@ -784,10 +796,10 @@ function findObsoleteResources(apiHostname, username, password) {
this.push(file);
}, function () {
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources = [];
for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) {
const i18Resources = [...json.editor, ...json.workbench].map((r) => r.project + '/' + r.name.replace(/\//g, '_'));
const extractedResources = [];
for (const project of [workbenchProject, editorProject]) {
for (const resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') {
extractedResources.push(project + '/' + resource);
}
@@ -797,11 +809,11 @@ function findObsoleteResources(apiHostname, username, password) {
console.log(`[i18n] Obsolete resources in file 'build/lib/i18n.resources.json': JSON.stringify(${i18Resources.filter(p => extractedResources.indexOf(p) === -1)})`);
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
}
let promises = [];
for (let project in resourcesByProject) {
const promises = [];
for (const project in resourcesByProject) {
promises.push(getAllResources(project, apiHostname, username, password).then(resources => {
let expectedResources = resourcesByProject[project];
let unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
const expectedResources = resourcesByProject[project];
const unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
if (unusedResources.length) {
console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
}
@@ -856,7 +868,7 @@ function createResource(project, slug, xlfFile, apiHostname, credentials) {
auth: credentials,
method: 'POST'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 201) {
log(`Resource ${project}/${slug} successfully created on Transifex.`);
}
@@ -888,7 +900,7 @@ function updateResource(project, slug, xlfFile, apiHostname, credentials) {
auth: credentials,
method: 'PUT'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 200) {
res.setEncoding('utf8');
let responseBuffer = '';
@@ -913,7 +925,7 @@ function updateResource(project, slug, xlfFile, apiHostname, credentials) {
});
}
function pullSetupXlfFiles(apiHostname, username, password, language, includeDefault) {
let setupResources = [{ name: 'setup_messages', project: workbenchProject }];
const setupResources = [{ name: 'setup_messages', project: workbenchProject }];
if (includeDefault) {
setupResources.push({ name: 'setup_default', project: setupProject });
}
@@ -922,7 +934,7 @@ function pullSetupXlfFiles(apiHostname, username, password, language, includeDef
exports.pullSetupXlfFiles = pullSetupXlfFiles;
function pullXlfFiles(apiHostname, username, password, language, resources) {
const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length;
const expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return (0, event_stream_1.readable)(function (_count, callback) {
// Mark end of stream when all resources were retrieved
@@ -949,7 +961,7 @@ function retrieveResource(language, resource, apiHostname, credentials) {
return limiter.queue(() => new Promise((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_');
const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
@@ -958,8 +970,8 @@ function retrieveResource(language, resource, apiHostname, credentials) {
method: 'GET'
};
console.log('[transifex] Fetching ' + options.path);
let request = https.request(options, (res) => {
let xlfBuffer = [];
const request = https.request(options, (res) => {
const xlfBuffer = [];
res.on('data', (chunk) => xlfBuffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
@@ -981,14 +993,14 @@ function retrieveResource(language, resource, apiHostname, credentials) {
}));
}
function prepareI18nFiles() {
let parsePromises = [];
const parsePromises = [];
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createI18nFile(file.originalFilePath, file.messages);
const translatedFile = createI18nFile(file.originalFilePath, file.messages);
stream.queue(translatedFile);
});
});
@@ -1000,7 +1012,7 @@ function prepareI18nFiles() {
}
exports.prepareI18nFiles = prepareI18nFiles;
function createI18nFile(originalFilePath, messages) {
let result = Object.create(null);
const result = Object.create(null);
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -1008,7 +1020,7 @@ function createI18nFile(originalFilePath, messages) {
'--------------------------------------------------------------------------------------------',
'Do not edit this file. It is machine generated.'
];
for (let key of Object.keys(messages)) {
for (const key of Object.keys(messages)) {
result[key] = messages[key];
}
let content = JSON.stringify(result, null, '\t');
@@ -1068,7 +1080,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
const translatedMainFile = createI18nFile('./main', mainPack);
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
for (const extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
const externalExtensionId = externalExtensions[extension];
@@ -1088,14 +1100,14 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
}
exports.prepareI18nPackFiles = prepareI18nPackFiles;
function prepareIslFiles(language, innoSetupConfig) {
let parsePromises = [];
const parsePromises = [];
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
const translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
stream.queue(translatedFile);
});
}).catch(reason => {
@@ -1111,7 +1123,7 @@ function prepareIslFiles(language, innoSetupConfig) {
}
exports.prepareIslFiles = prepareIslFiles;
function createIslFile(originalFilePath, messages, language, innoSetup) {
let content = [];
const content = [];
let originalContent;
if (path.basename(originalFilePath) === 'Default') {
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
@@ -1121,16 +1133,16 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
}
originalContent.lines.forEach(line => {
if (line.length > 0) {
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') {
content.push(line);
}
else {
let sections = line.split('=');
let key = sections[0];
const sections = line.split('=');
const key = sections[0];
let translated = line;
if (key) {
let translatedMessage = messages[key];
const translatedMessage = messages[key];
if (translatedMessage) {
translated = `${key}=${translatedMessage}`;
}
@@ -1148,9 +1160,9 @@ function createIslFile(originalFilePath, messages, language, innoSetup) {
});
}
function encodeEntities(value) {
let result = [];
const result = [];
for (let i = 0; i < value.length; i++) {
let ch = value[i];
const ch = value[i];
switch (ch) {
case '<':
result.push('&lt;');

View File

@@ -119,7 +119,11 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/localizations",
"name": "vs/workbench/contrib/mergeEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/localization",
"project": "vscode-workbench"
},
{
@@ -274,6 +278,10 @@
"name": "vs/workbench/contrib/userDataSync",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/editSessions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/views",
"project": "vscode-workbench"
@@ -286,6 +294,14 @@
"name": "vs/workbench/contrib/audioCues",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/deprecatedExtensionMigrator",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/bracketPairColorizer2Telemetry",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/offline",
"project": "vscode-workbench"
@@ -422,6 +438,10 @@
"name": "vs/workbench/services/userDataSync",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/editSessions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/views",
"project": "vscode-workbench"
@@ -451,11 +471,11 @@
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/profiles",
"name": "vs/workbench/contrib/userDataProfile",
"project": "vscode-profiles"
},
{
"name": "vs/workbench/services/profiles",
"name": "vs/workbench/services/userDataProfile",
"project": "vscode-profiles"
}
]

View File

@@ -87,7 +87,7 @@ interface LocalizeInfo {
module LocalizeInfo {
export function is(value: any): value is LocalizeInfo {
let candidate = value as LocalizeInfo;
const candidate = value as LocalizeInfo;
return Is.defined(candidate) && Is.string(candidate.key) && (Is.undef(candidate.comment) || (Is.array(candidate.comment) && candidate.comment.every(element => Is.string(element))));
}
}
@@ -104,8 +104,8 @@ module BundledFormat {
return false;
}
let candidate = value as BundledFormat;
let length = Object.keys(value).length;
const candidate = value as BundledFormat;
const length = Object.keys(value).length;
return length === 3 && Is.defined(candidate.keys) && Is.defined(candidate.messages) && Is.defined(candidate.bundles);
}
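For orientation, the three parallel maps that BundledFormat.is checks for line up with how processCoreBundleFormat consumes them further down: keys and messages are indexed by module id, bundles by bundle id. A minimal object that would pass the guard, with invented module, bundle, and key names, looks roughly like this sketch:

const sampleBundledFormat = {
    keys: {
        'vs/editor/contrib/find/findController': ['startFindAction', { key: 'findNextAction', comment: ['keybinding label'] }]
    },
    messages: {
        'vs/editor/contrib/find/findController': ['Find', 'Find Next']
    },
    bundles: {
        'vs/editor/editor.main': ['vs/editor/contrib/find/findController']
    }
};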
@@ -126,7 +126,7 @@ module PackageJsonFormat {
return false;
}
return Object.keys(value).every(key => {
let element = value[key];
const element = value[key];
return Is.string(element) || (Is.object(element) && Is.defined(element.message) && Is.defined(element.comment));
});
}
@@ -218,9 +218,9 @@ export class XLF {
}
this.numberOfMessages += keys.length;
this.files[original] = [];
let existingKeys = new Set<string>();
const existingKeys = new Set<string>();
for (let i = 0; i < keys.length; i++) {
let key = keys[i];
const key = keys[i];
let realKey: string | undefined;
let comment: string | undefined;
if (Is.string(key)) {
@@ -236,7 +236,7 @@ export class XLF {
continue;
}
existingKeys.add(realKey);
let message: string = encodeEntities(messages[i]);
const message: string = encodeEntities(messages[i]);
this.files[original].push({ id: realKey, message: message, comment: comment });
}
}
@@ -269,15 +269,15 @@ export class XLF {
}
private appendNewLine(content: string, indent?: number): void {
let line = new Line(indent);
const line = new Line(indent);
line.append(content);
this.buffer.push(line.toString());
}
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve) => {
let parser = new xml2js.Parser();
let files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
const parser = new xml2js.Parser();
const files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
parser.parseString(xlfString, function (_err: any, result: any) {
const fileNodes: any[] = result['xliff']['file'];
fileNodes.forEach(file => {
@@ -302,9 +302,9 @@ export class XLF {
static parse = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve, reject) => {
let parser = new xml2js.Parser();
const parser = new xml2js.Parser();
let files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
const files: { messages: Map<string>; originalFilePath: string; language: string }[] = [];
parser.parseString(xlfString, function (err: any, result: any) {
if (err) {
@@ -321,7 +321,7 @@ export class XLF {
if (!originalFilePath) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain original attribute to determine the original location of the resource file.`));
}
let language = file.$['target-language'];
const language = file.$['target-language'];
if (!language) {
reject(new Error(`XLF parsing error: XLIFF file node does not contain target-language attribute to determine translated language.`));
}
@@ -412,9 +412,10 @@ function stripComments(content: string): string {
// Second group matches a single quoted string
// Third group matches a multi line comment
// Fourth group matches a single line comment
const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1: string, _m2: string, m3: string, m4: string) => {
// Only one of m1, m2, m3, m4 matches
// Fifth group matches a trailing comma
const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))|(,\s*[}\]])/g;
const result = content.replace(regexp, (match, _m1: string, _m2: string, m3: string, m4: string, m5: string) => {
// Only one of m1, m2, m3, m4, m5 matches
if (m3) {
// A block comment. Replace with nothing
return '';
@@ -427,6 +428,9 @@ function stripComments(content: string): string {
} else {
return '';
}
} else if (m5) {
// Remove the trailing comma
return match.substring(1);
} else {
// We match a string
return match;
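To see what the added fifth group buys, here is a small self-contained sketch that applies the same regular expression and replacement rules to a JSON-with-comments string. The helper name and sample input are illustrative, and the line-comment branch is simplified to always keep a single line break, where the original keeps the break only when one was matched:

const stripJsonComments = (content: string): string => {
    const regexp = /("[^"\\]*(?:\\.[^"\\]*)*")|('[^'\\]*(?:\\.[^'\\]*)*')|(\/\*[^\/\*]*(?:(?:\*|\/)[^\/\*]*)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))|(,\s*[}\]])/g;
    return content.replace(regexp, (match, _m1: string, _m2: string, m3: string, m4: string, m5: string) => {
        if (m3) { return ''; }                     // block comment: drop it
        if (m4) { return '\n'; }                   // line comment: drop it, keep a line break (simplified)
        if (m5) { return match.substring(1); }     // trailing comma: drop the comma, keep the closing } or ]
        return match;                              // quoted string: leave untouched
    });
};
// JSON.parse(stripJsonComments('{\n  // pick one\n  "kind": "editor",\n}')).kind === 'editor'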
@@ -472,22 +476,22 @@ function escapeCharacters(value: string): string {
}
function processCoreBundleFormat(fileHeader: string, languages: Language[], json: BundledFormat, emitter: ThroughStream) {
let keysSection = json.keys;
let messageSection = json.messages;
let bundleSection = json.bundles;
const keysSection = json.keys;
const messageSection = json.messages;
const bundleSection = json.bundles;
let statistics: Map<number> = Object.create(null);
const statistics: Map<number> = Object.create(null);
let defaultMessages: Map<Map<string>> = Object.create(null);
let modules = Object.keys(keysSection);
const defaultMessages: Map<Map<string>> = Object.create(null);
const modules = Object.keys(keysSection);
modules.forEach((module) => {
let keys = keysSection[module];
let messages = messageSection[module];
const keys = keysSection[module];
const messages = messageSection[module];
if (!messages || keys.length !== messages.length) {
emitter.emit('error', `Message for module ${module} corrupted. Mismatch in number of keys and messages.`);
return;
}
let messageMap: Map<string> = Object.create(null);
const messageMap: Map<string> = Object.create(null);
defaultMessages[module] = messageMap;
keys.map((key, i) => {
if (typeof key === 'string') {
@@ -498,28 +502,28 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
});
});
let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
const languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
if (!fs.existsSync(languageDirectory)) {
log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
}
let sortedLanguages = sortLanguages(languages);
const sortedLanguages = sortLanguages(languages);
sortedLanguages.forEach((language) => {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`Generating nls bundles for: ${language.id}`);
}
statistics[language.id] = 0;
let localizedModules: Map<string[]> = Object.create(null);
let languageFolderName = language.translationId || language.id;
let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
const localizedModules: Map<string[]> = Object.create(null);
const languageFolderName = language.translationId || language.id;
const i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
let allMessages: I18nFormat | undefined;
if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
const content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
allMessages = JSON.parse(content);
}
modules.forEach((module) => {
let order = keysSection[module];
const order = keysSection[module];
let moduleMessage: { [messageKey: string]: string } | undefined;
if (allMessages) {
moduleMessage = allMessages.contents[module];
@@ -531,7 +535,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
moduleMessage = defaultMessages[module];
statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
}
let localizedMessages: string[] = [];
const localizedMessages: string[] = [];
order.forEach((keyInfo) => {
let key: string | null = null;
if (typeof keyInfo === 'string') {
@@ -552,14 +556,14 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
localizedModules[module] = localizedMessages;
});
Object.keys(bundleSection).forEach((bundle) => {
let modules = bundleSection[bundle];
let contents: string[] = [
const modules = bundleSection[bundle];
const contents: string[] = [
fileHeader,
`define("${bundle}.nls.${language.id}", {`
];
modules.forEach((module, index) => {
contents.push(`\t"${module}": [`);
let messages = localizedModules[module];
const messages = localizedModules[module];
if (!messages) {
emitter.emit('error', `Didn't find messages for module ${module}.`);
return;
@@ -574,11 +578,11 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
});
});
Object.keys(statistics).forEach(key => {
let value = statistics[key];
const value = statistics[key];
log(`${key} has ${value} untranslated strings.`);
});
sortedLanguages.forEach(language => {
let stats = statistics[language.id];
const stats = statistics[language.id];
if (Is.undef(stats)) {
log(`\tNo translations found for language ${language.id}. Using default language instead.`);
}
@@ -587,7 +591,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
export function processNlsFiles(opts: { fileHeader: string; languages: Language[] }): ThroughStream {
return through(function (this: ThroughStream, file: File) {
let fileName = path.basename(file.path);
const fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
if (file.isBuffer()) {
@@ -674,7 +678,7 @@ export function createXlfFilesForCoreBundle(): ThroughStream {
xlf.addFile(`src/${coreModule}`, keys, messages);
}
}
for (let resource in xlfs) {
for (const resource in xlfs) {
const xlf = xlfs[resource];
const filePath = `${xlf.project}/${resource.replace(/\//g, '_')}.xlf`;
const xlfFile = new File({
@@ -704,7 +708,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
if (!stat.isDirectory()) {
return;
}
let extensionName = path.basename(extensionFolder.path);
const extensionName = path.basename(extensionFolder.path);
if (extensionName === 'node_modules') {
return;
}
@@ -722,22 +726,29 @@ export function createXlfFilesForExtensions(): ThroughStream {
const basename = path.basename(file.path);
if (basename === 'package.nls.json') {
const json: PackageJsonFormat = JSON.parse(buffer.toString('utf8'));
const keys = Object.keys(json);
const messages = keys.map((key) => {
const keys: Array<string | LocalizeInfo> = [];
const messages: string[] = [];
Object.keys(json).forEach((key) => {
const value = json[key];
if (Is.string(value)) {
return value;
keys.push(key);
messages.push(value);
} else if (value) {
return value.message;
keys.push({
key,
comment: value.comment
});
messages.push(value.message);
} else {
return `Unknown message for key: ${key}`;
keys.push(key);
messages.push(`Unknown message for key: ${key}`);
}
});
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
} else if (basename === 'nls.metadata.json') {
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
for (let file in json) {
for (const file in json) {
const fileContent = json[file];
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
}
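The reshaped package.nls.json handling above now forwards translator comments to the XLF writer instead of flattening every entry to a plain string. As a rough illustration (the input object and ids are invented), an entry like the following would be split into parallel keys/messages arrays before getXlf().addFile is called:

// Hypothetical package.nls.json content for some extension:
const json: { [key: string]: string | { message: string; comment: string[] } } = {
    'displayName': 'My Extension',
    'config.title': { message: 'My Extension Settings', comment: ['Shown in the settings UI'] }
};
const keys: Array<string | { key: string; comment: string[] }> = [];
const messages: string[] = [];
for (const key of Object.keys(json)) {
    const value = json[key];
    if (typeof value === 'string') {
        keys.push(key);
        messages.push(value);
    } else {
        keys.push({ key, comment: value.comment });
        messages.push(value.message);
    }
}
// keys     -> ['displayName', { key: 'config.title', comment: ['Shown in the settings UI'] }]
// messages -> ['My Extension', 'My Extension Settings']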
@@ -748,7 +759,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
}
}, function () {
if (_xlf) {
let xlfFile = new File({
const xlfFile = new File({
path: path.join(extensionsProject, extensionName + '.xlf'),
contents: Buffer.from(_xlf.toString(), 'utf8')
});
@@ -781,17 +792,17 @@ export function createXlfFilesForIsl(): ThroughStream {
throw new Error(`Unknown input file ${file.path}`);
}
let xlf = new XLF(projectName),
const xlf = new XLF(projectName),
keys: string[] = [],
messages: string[] = [];
let model = new TextModel(file.contents.toString());
const model = new TextModel(file.contents.toString());
let inMessageSection = false;
model.lines.forEach(line => {
if (line.length === 0) {
return;
}
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
switch (firstChar) {
case ';':
// Comment line;
@@ -803,12 +814,12 @@ export function createXlfFilesForIsl(): ThroughStream {
if (!inMessageSection) {
return;
}
let sections: string[] = line.split('=');
const sections: string[] = line.split('=');
if (sections.length !== 2) {
throw new Error(`Badly formatted message found: ${line}`);
} else {
let key = sections[0];
let value = sections[1];
const key = sections[0];
const value = sections[1];
if (key.length > 0 && value.length > 0) {
keys.push(key);
messages.push(value);
@@ -827,8 +838,8 @@ export function createXlfFilesForIsl(): ThroughStream {
}
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
let tryGetPromises: Array<Promise<boolean>> = [];
let updateCreatePromises: Array<Promise<boolean>> = [];
const tryGetPromises: Array<Promise<boolean>> = [];
const updateCreatePromises: Array<Promise<boolean>> = [];
return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative);
@@ -869,11 +880,11 @@ function getAllResources(project: string, apiHostname: string, username: string,
};
const request = https.request(options, (res) => {
let buffer: Buffer[] = [];
const buffer: Buffer[] = [];
res.on('data', (chunk: Buffer) => buffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
let json = JSON.parse(Buffer.concat(buffer).toString());
const json = JSON.parse(Buffer.concat(buffer).toString());
if (Array.isArray(json)) {
resolve(json.map(o => o.slug));
return;
@@ -892,7 +903,7 @@ function getAllResources(project: string, apiHostname: string, username: string,
}
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
let resourcesByProject: Map<string[]> = Object.create(null);
const resourcesByProject: Map<string[]> = Object.create(null);
resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone
return through(function (this: ThroughStream, file: File) {
@@ -909,10 +920,10 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
}, function () {
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources: string[] = [];
for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) {
const i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
const extractedResources: string[] = [];
for (const project of [workbenchProject, editorProject]) {
for (const resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') {
extractedResources.push(project + '/' + resource);
}
@@ -923,12 +934,12 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': ${JSON.stringify(extractedResources.filter(p => i18Resources.indexOf(p) === -1))}`);
}
let promises: Array<Promise<void>> = [];
for (let project in resourcesByProject) {
const promises: Array<Promise<void>> = [];
for (const project in resourcesByProject) {
promises.push(
getAllResources(project, apiHostname, username, password).then(resources => {
let expectedResources = resourcesByProject[project];
let unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
const expectedResources = resourcesByProject[project];
const unusedResources = resources.filter(resource => resource && expectedResources.indexOf(resource) === -1);
if (unusedResources.length) {
console.log(`[transifex] Obsolete resources in project '${project}': ${unusedResources.join(', ')}`);
}
@@ -986,7 +997,7 @@ function createResource(project: string, slug: string, xlfFile: File, apiHostnam
method: 'POST'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 201) {
log(`Resource ${project}/${slug} successfully created on Transifex.`);
} else {
@@ -1020,7 +1031,7 @@ function updateResource(project: string, slug: string, xlfFile: File, apiHostnam
method: 'PUT'
};
let request = https.request(options, (res) => {
const request = https.request(options, (res) => {
if (res.statusCode === 200) {
res.setEncoding('utf8');
@@ -1047,7 +1058,7 @@ function updateResource(project: string, slug: string, xlfFile: File, apiHostnam
}
export function pullSetupXlfFiles(apiHostname: string, username: string, password: string, language: Language, includeDefault: boolean): NodeJS.ReadableStream {
let setupResources = [{ name: 'setup_messages', project: workbenchProject }];
const setupResources = [{ name: 'setup_messages', project: workbenchProject }];
if (includeDefault) {
setupResources.push({ name: 'setup_default', project: setupProject });
}
@@ -1056,7 +1067,7 @@ export function pullSetupXlfFiles(apiHostname: string, username: string, passwor
function pullXlfFiles(apiHostname: string, username: string, password: string, language: Language, resources: Resource[]): NodeJS.ReadableStream {
const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length;
const expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return readable(function (_count: any, callback: any) {
@@ -1087,7 +1098,7 @@ function retrieveResource(language: Language, resource: Resource, apiHostname: s
return limiter.queue(() => new Promise<File | null>((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_');
const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
const options = {
hostname: apiHostname,
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
@@ -1097,8 +1108,8 @@ function retrieveResource(language: Language, resource: Resource, apiHostname: s
};
console.log('[transifex] Fetching ' + options.path);
let request = https.request(options, (res) => {
let xlfBuffer: Buffer[] = [];
const request = https.request(options, (res) => {
const xlfBuffer: Buffer[] = [];
res.on('data', (chunk: Buffer) => xlfBuffer.push(chunk));
res.on('end', () => {
if (res.statusCode === 200) {
@@ -1119,16 +1130,16 @@ function retrieveResource(language: Language, resource: Resource, apiHostname: s
}
export function prepareI18nFiles(): ThroughStream {
let parsePromises: Promise<ParsedXLF[]>[] = [];
const parsePromises: Promise<ParsedXLF[]>[] = [];
return through(function (this: ThroughStream, xlf: File) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(
resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createI18nFile(file.originalFilePath, file.messages);
const translatedFile = createI18nFile(file.originalFilePath, file.messages);
stream.queue(translatedFile);
});
}
@@ -1149,7 +1160,7 @@ export function createI18nFile(originalFilePath: string, messages: any): File {
'--------------------------------------------------------------------------------------------',
'Do not edit this file. It is machine generated.'
];
for (let key of Object.keys(messages)) {
for (const key of Object.keys(messages)) {
result[key] = messages[key];
}
@@ -1178,16 +1189,16 @@ export interface TranslationPath {
}
export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingTranslationPaths: TranslationPath[], pseudo = false): NodeJS.ReadWriteStream {
let parsePromises: Promise<ParsedXLF[]>[] = [];
let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
let extensionsPacks: Map<I18nPack> = {};
let errors: any[] = [];
const parsePromises: Promise<ParsedXLF[]>[] = [];
const mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
const extensionsPacks: Map<I18nPack> = {};
const errors: any[] = [];
return through(function (this: ThroughStream, xlf: File) {
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
const project = path.basename(path.dirname(path.dirname(xlf.relative)));
const resource = path.basename(xlf.relative, '.xlf');
const contents = xlf.contents.toString();
log(`Found ${project}: ${resource}`);
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
const parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(
resolvedFiles => {
@@ -1225,7 +1236,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
for (const extension in extensionsPacks) {
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
@@ -1246,16 +1257,16 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
}
export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup): ThroughStream {
let parsePromises: Promise<ParsedXLF[]>[] = [];
const parsePromises: Promise<ParsedXLF[]>[] = [];
return through(function (this: ThroughStream, xlf: File) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
const stream = this;
const parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
parsePromise.then(
resolvedFiles => {
resolvedFiles.forEach(file => {
let translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
const translatedFile = createIslFile(file.originalFilePath, file.messages, language, innoSetupConfig);
stream.queue(translatedFile);
});
}
@@ -1272,7 +1283,7 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
}
function createIslFile(originalFilePath: string, messages: Map<string>, language: Language, innoSetup: InnoSetup): File {
let content: string[] = [];
const content: string[] = [];
let originalContent: TextModel;
if (path.basename(originalFilePath) === 'Default') {
originalContent = new TextModel(fs.readFileSync(originalFilePath + '.isl', 'utf8'));
@@ -1281,15 +1292,15 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
}
originalContent.lines.forEach(line => {
if (line.length > 0) {
let firstChar = line.charAt(0);
const firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') {
content.push(line);
} else {
let sections: string[] = line.split('=');
let key = sections[0];
const sections: string[] = line.split('=');
const key = sections[0];
let translated = line;
if (key) {
let translatedMessage = messages[key];
const translatedMessage = messages[key];
if (translatedMessage) {
translated = `${key}=${translatedMessage}`;
}
@@ -1311,9 +1322,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
}
function encodeEntities(value: string): string {
let result: string[] = [];
const result: string[] = [];
for (let i = 0; i < value.length; i++) {
let ch = value[i];
const ch = value[i];
switch (ch) {
case '<':
result.push('&lt;');
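As a small usage illustration of the createIslFile logic above: each non-header line is split on '=', and the right-hand side is swapped for a translated message when one exists. A per-line sketch with an invented helper name and sample data:

function translateIslLine(line: string, messages: { [key: string]: string }): string {
    const firstChar = line.charAt(0);
    if (firstChar === '[' || firstChar === ';') {
        return line;                                   // section headers and comments pass through unchanged
    }
    const key = line.split('=')[0];
    const translatedMessage = key ? messages[key] : undefined;
    return translatedMessage ? `${key}=${translatedMessage}` : line;
}
// translateIslLine('WelcomeLabel=Welcome', { WelcomeLabel: 'Willkommen' }) === 'WelcomeLabel=Willkommen'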

View File

@@ -72,6 +72,11 @@ const RULES = [
target: '**/{vs,sql}/**/test/**',
skip: true // -> skip all test files
},
// TODO@bpasero remove me once electron utility process has landed
{
target: '**/vs/workbench/services/extensions/electron-sandbox/nativeLocalProcessExtensionHost.ts',
skip: true
},
// Common: vs/base/common/platform.ts
{
target: '**/{vs,sql}/base/common/platform.ts',

View File

@@ -27,7 +27,7 @@ function isDeclaration(ts, a) {
}
function visitTopLevelDeclarations(ts, sourceFile, visitor) {
let stop = false;
let visit = (node) => {
const visit = (node) => {
if (stop) {
return;
}
@@ -49,19 +49,19 @@ function visitTopLevelDeclarations(ts, sourceFile, visitor) {
visit(sourceFile);
}
function getAllTopLevelDeclarations(ts, sourceFile) {
let all = [];
const all = [];
visitTopLevelDeclarations(ts, sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
const interfaceDeclaration = node;
const triviaStart = interfaceDeclaration.pos;
const triviaEnd = interfaceDeclaration.name.pos;
const triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
let nodeText = getNodeText(sourceFile, node);
const nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
@@ -95,7 +95,7 @@ function getNodeText(sourceFile, node) {
function hasModifier(modifiers, kind) {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
const mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
@@ -113,14 +113,14 @@ function isDefaultExport(ts, declaration) {
function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums) {
let result = getNodeText(sourceFile, declaration);
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = declaration;
const interfaceDeclaration = declaration;
const staticTypeName = (isDefaultExport(ts, interfaceDeclaration)
? `${importName}.default`
: `${importName}.${declaration.name.text}`);
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr = [];
const arr = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
@@ -129,7 +129,7 @@ function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importN
const members = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
const memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
result = result.replace(memberText, '');
}
@@ -152,7 +152,7 @@ function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importN
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
let lines = result.split(/\r\n|\r|\n/);
const lines = result.split(/\r\n|\r|\n/);
for (let i = 0; i < lines.length; i++) {
if (/\s*\*/.test(lines[i])) {
// very likely a comment
@@ -177,9 +177,9 @@ function format(ts, text, endl) {
return text;
}
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
const sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
const edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function countParensCurly(text) {
@@ -202,7 +202,7 @@ function format(ts, text, endl) {
return r;
}
function preformat(text, endl) {
let lines = text.split(endl);
const lines = text.split(endl);
let inComment = false;
let inCommentDeltaIndent = 0;
let indent = 0;
@@ -282,9 +282,9 @@ function format(ts, text, endl) {
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
const change = edits[i];
const head = result.slice(0, change.span.start);
const tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
@@ -300,15 +300,15 @@ function createReplacerFromDirectives(directives) {
}
function createReplacer(data) {
data = data || '';
let rawDirectives = data.split(';');
let directives = [];
const rawDirectives = data.split(';');
const directives = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
const pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
const replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
@@ -317,32 +317,32 @@ function createReplacer(data) {
}
function generateDeclarationFile(ts, recipe, sourceFileGetter) {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
let result = [];
const lines = recipe.split(endl);
const result = [];
let usageCounter = 0;
let usageImports = [];
let usage = [];
const usageImports = [];
const usage = [];
let failed = false;
usage.push(`var a: any;`);
usage.push(`var b: any;`);
const generateUsageImport = (moduleId) => {
let importName = 'm' + (++usageCounter);
const importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
let enums = [];
const enums = [];
let version = null;
lines.forEach(line => {
if (failed) {
return;
}
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
const m0 = line.match(/^\/\/dtsv=(\d+)$/);
if (m0) {
version = m0[1];
}
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
const moduleId = m1[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@@ -351,14 +351,14 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
const replacer = createReplacer(m1[2]);
const typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
const declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
if (!declaration) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${typeName}`);
@@ -369,9 +369,9 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
});
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
const moduleId = m2[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@@ -380,10 +380,10 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap = {};
let typesToExcludeArr = [];
const replacer = createReplacer(m2[2]);
const typeNames = m2[3].split(/,/);
const typesToExcludeMap = {};
const typesToExcludeArr = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
@@ -400,7 +400,7 @@ function generateDeclarationFile(ts, recipe, sourceFileGetter) {
}
else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
const nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
@@ -605,7 +605,7 @@ class TypeScriptLanguageServiceHost {
}
}
function execute() {
let r = run3(new DeclarationResolver(new FSProvider()));
const r = run3(new DeclarationResolver(new FSProvider()));
if (!r) {
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}

View File

@@ -40,7 +40,7 @@ function isDeclaration(ts: typeof import('typescript'), a: TSTopLevelDeclare): a
function visitTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: ts.SourceFile, visitor: (node: TSTopLevelDeclare) => boolean): void {
let stop = false;
let visit = (node: ts.Node): void => {
const visit = (node: ts.Node): void => {
if (stop) {
return;
}
@@ -67,19 +67,19 @@ function visitTopLevelDeclarations(ts: typeof import('typescript'), sourceFile:
function getAllTopLevelDeclarations(ts: typeof import('typescript'), sourceFile: ts.SourceFile): TSTopLevelDeclare[] {
let all: TSTopLevelDeclare[] = [];
const all: TSTopLevelDeclare[] = [];
visitTopLevelDeclarations(ts, sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration>node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
const interfaceDeclaration = <ts.InterfaceDeclaration>node;
const triviaStart = interfaceDeclaration.pos;
const triviaEnd = interfaceDeclaration.name.pos;
const triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
} else {
let nodeText = getNodeText(sourceFile, node);
const nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
@@ -115,10 +115,10 @@ function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number
return sourceFile.getFullText().substring(node.pos, node.end);
}
function hasModifier(modifiers: ts.NodeArray<ts.Modifier> | undefined, kind: ts.SyntaxKind): boolean {
function hasModifier(modifiers: ts.NodeArray<ts.ModifierLike> | undefined, kind: ts.SyntaxKind): boolean {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
const mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
@@ -141,7 +141,7 @@ function isDefaultExport(ts: typeof import('typescript'), declaration: ts.Interf
function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[], enums: IEnumEntry[]): string {
let result = getNodeText(sourceFile, declaration);
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
const interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
const staticTypeName = (
isDefaultExport(ts, interfaceDeclaration)
@@ -152,7 +152,7 @@ function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sou
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr: string[] = [];
const arr: string[] = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
@@ -162,7 +162,7 @@ function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sou
const members: ts.NodeArray<ts.ClassElement | ts.TypeElement> = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
const memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
result = result.replace(memberText, '');
} else {
@@ -182,7 +182,7 @@ function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sou
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
let lines = result.split(/\r\n|\r|\n/);
const lines = result.split(/\r\n|\r|\n/);
for (let i = 0; i < lines.length; i++) {
if (/\s*\*/.test(lines[i])) {
// very likely a comment
@@ -212,10 +212,10 @@ function format(ts: typeof import('typescript'), text: string, endl: string): st
}
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
const sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = (<any>ts).formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
const edits = (<any>ts).formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
@@ -242,7 +242,7 @@ function format(ts: typeof import('typescript'), text: string, endl: string): st
}
function preformat(text: string, endl: string): string {
let lines = text.split(endl);
const lines = text.split(endl);
let inComment = false;
let inCommentDeltaIndent = 0;
let indent = 0;
@@ -328,9 +328,9 @@ function format(ts: typeof import('typescript'), text: string, endl: string): st
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
const change = edits[i];
const head = result.slice(0, change.span.start);
const tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
@@ -348,15 +348,15 @@ function createReplacerFromDirectives(directives: [RegExp, string][]): (str: str
function createReplacer(data: string): (str: string) => string {
data = data || '';
let rawDirectives = data.split(';');
let directives: [RegExp, string][] = [];
const rawDirectives = data.split(';');
const directives: [RegExp, string][] = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
const pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
const replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
@@ -380,12 +380,12 @@ interface IEnumEntry {
function generateDeclarationFile(ts: typeof import('typescript'), recipe: string, sourceFileGetter: SourceFileGetter): ITempResult | null {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
let result: string[] = [];
const lines = recipe.split(endl);
const result: string[] = [];
let usageCounter = 0;
let usageImports: string[] = [];
let usage: string[] = [];
const usageImports: string[] = [];
const usage: string[] = [];
let failed = false;
@@ -393,12 +393,12 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
usage.push(`var b: any;`);
const generateUsageImport = (moduleId: string) => {
let importName = 'm' + (++usageCounter);
const importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
let enums: IEnumEntry[] = [];
const enums: IEnumEntry[] = [];
let version: string | null = null;
lines.forEach(line => {
@@ -407,14 +407,14 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
return;
}
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
const m0 = line.match(/^\/\/dtsv=(\d+)$/);
if (m0) {
version = m0[1];
}
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
const moduleId = m1[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@@ -425,15 +425,15 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
const replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
const typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
const declaration = getTopLevelDeclaration(ts, sourceFile, typeName);
if (!declaration) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${typeName}`);
@@ -445,9 +445,9 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
const m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
const moduleId = m2[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
@@ -458,11 +458,11 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
const replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap: { [typeName: string]: boolean } = {};
let typesToExcludeArr: string[] = [];
const typeNames = m2[3].split(/,/);
const typesToExcludeMap: { [typeName: string]: boolean } = {};
const typesToExcludeArr: string[] = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
@@ -479,7 +479,7 @@ function generateDeclarationFile(ts: typeof import('typescript'), recipe: string
}
} else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
const nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
@@ -732,7 +732,7 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
}
export function execute(): IMonacoDeclarationResult {
let r = run3(new DeclarationResolver(new FSProvider()));
const r = run3(new DeclarationResolver(new FSProvider()));
if (!r) {
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
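For context on the createReplacer helper defined earlier in this file: a recipe directive of the form find=>replace;otherFind=>otherReplace is compiled into word-bounded global regular expressions that are then applied to each included declaration's text. A compact, self-contained sketch of the same idea (the function name and the sample directive are illustrative only):

function createSimpleReplacer(data: string): (str: string) => string {
    const directives: [RegExp, string][] = [];
    for (const rawDirective of (data || '').split(';')) {
        if (rawDirective.length === 0) { continue; }
        const [find, replace] = rawDirective.split('=>');
        // Escape regex metacharacters, then require word boundaries, as above.
        const findStr = '\\b' + find.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&') + '\\b';
        directives.push([new RegExp(findStr, 'g'), replace]);
    }
    return (str) => directives.reduce((acc, [re, to]) => acc.replace(re, to), str);
}
// createSimpleReplacer('editorCommon.IRange=>IRange')('let r: editorCommon.IRange;') === 'let r: IRange;'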

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.minifyTask = exports.optimizeTask = exports.loaderConfig = void 0;
const es = require("event-stream");
@@ -37,40 +37,58 @@ function loaderConfig() {
}
exports.loaderConfig = loaderConfig;
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(src, bundledFileHeader, bundleLoader, externalLoaderInfo) {
let sources = [
`${src}/vs/loader.js`
];
if (bundleLoader) {
sources = sources.concat([
`${src}/vs/css.js`,
`${src}/vs/nls.js`
]);
}
let isFirst = true;
function loaderPlugin(src, base, amdModuleId) {
return (gulp
.src(sources, { base: `${src}` })
.src(src, { base })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
if (amdModuleId) {
let contents = data.contents.toString('utf8');
contents = contents.replace(/^define\(/m, `define("${amdModuleId}",`);
data.contents = Buffer.from(contents);
}
else {
this.emit('data', data);
this.emit('data', data);
})));
}
function loader(src, bundledFileHeader, bundleLoader, externalLoaderInfo) {
let loaderStream = gulp.src(`${src}/vs/loader.js`, { base: `${src}` });
if (bundleLoader) {
loaderStream = es.merge(loaderStream, loaderPlugin(`${src}/vs/css.js`, `${src}`, 'vs/css'), loaderPlugin(`${src}/vs/nls.js`, `${src}`, 'vs/nls'));
}
const files = [];
const order = (f) => {
if (f.path.endsWith('loader.js')) {
return 0;
}
if (f.path.endsWith('css.js')) {
return 1;
}
if (f.path.endsWith('nls.js')) {
return 2;
}
return 3;
};
return (loaderStream
.pipe(es.through(function (data) {
files.push(data);
}, function () {
files.sort((a, b) => {
return order(a) - order(b);
});
files.unshift(new VinylFile({
path: 'fake',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
if (externalLoaderInfo !== undefined) {
this.emit('data', new VinylFile({
files.push(new VinylFile({
path: 'fake2',
base: '.',
contents: Buffer.from(`require.config(${JSON.stringify(externalLoaderInfo, undefined, 2)});`)
}));
}
for (const file of files) {
this.emit('data', file);
}
this.emit('end');
}))
.pipe(concat('vs/loader.js')));

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as es from 'event-stream';
import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
@@ -43,41 +41,68 @@ export function loaderConfig() {
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(src: string, bundledFileHeader: string, bundleLoader: boolean, externalLoaderInfo?: any): NodeJS.ReadWriteStream {
let sources = [
`${src}/vs/loader.js`
];
if (bundleLoader) {
sources = sources.concat([
`${src}/vs/css.js`,
`${src}/vs/nls.js`
]);
}
let isFirst = true;
function loaderPlugin(src: string, base: string, amdModuleId: string | undefined): NodeJS.ReadWriteStream {
return (
gulp
.src(sources, { base: `${src}` })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
} else {
this.emit('data', data);
.src(src, { base })
.pipe(es.through(function (data: VinylFile) {
if (amdModuleId) {
let contents = data.contents.toString('utf8');
contents = contents.replace(/^define\(/m, `define("${amdModuleId}",`);
data.contents = Buffer.from(contents);
}
this.emit('data', data);
}))
);
}
function loader(src: string, bundledFileHeader: string, bundleLoader: boolean, externalLoaderInfo?: any): NodeJS.ReadWriteStream {
let loaderStream = gulp.src(`${src}/vs/loader.js`, { base: `${src}` });
if (bundleLoader) {
loaderStream = es.merge(
loaderStream,
loaderPlugin(`${src}/vs/css.js`, `${src}`, 'vs/css'),
loaderPlugin(`${src}/vs/nls.js`, `${src}`, 'vs/nls'),
);
}
const files: VinylFile[] = [];
const order = (f: VinylFile) => {
if (f.path.endsWith('loader.js')) {
return 0;
}
if (f.path.endsWith('css.js')) {
return 1;
}
if (f.path.endsWith('nls.js')) {
return 2;
}
return 3;
};
return (
loaderStream
.pipe(es.through(function (data) {
files.push(data);
}, function () {
files.sort((a, b) => {
return order(a) - order(b);
});
files.unshift(new VinylFile({
path: 'fake',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
if (externalLoaderInfo !== undefined) {
this.emit('data', new VinylFile({
files.push(new VinylFile({
path: 'fake2',
base: '.',
contents: Buffer.from(`require.config(${JSON.stringify(externalLoaderInfo, undefined, 2)});`)
}));
}
for (const file of files) {
this.emit('data', file);
}
this.emit('end');
}))
.pipe(concat('vs/loader.js'))
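The loaderPlugin helper above rewrites the first anonymous define( call in css.js and nls.js so that each file carries an explicit AMD module id before the three files are sorted and concatenated into vs/loader.js. A reduced sketch of the rename step on a plain string (the function name and sample contents are illustrative):

// Give an anonymous AMD module an explicit id, mirroring the stream transform above.
function nameAmdModule(contents: string, amdModuleId: string): string {
    return contents.replace(/^define\(/m, `define("${amdModuleId}",`);
}
// nameAmdModule(`define([], function () { return {}; });`, 'vs/css')
//   === `define("vs/css",[], function () { return {}; });`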

build/lib/policies.js (new file, 497 lines)
View File

@@ -0,0 +1,497 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const child_process_1 = require("child_process");
const fs_1 = require("fs");
const path = require("path");
const byline = require("byline");
const ripgrep_1 = require("@vscode/ripgrep");
const Parser = require("tree-sitter");
const node_fetch_1 = require("node-fetch");
const { typescript } = require('tree-sitter-typescript');
const product = require('../../product.json');
function isNlsString(value) {
return value ? typeof value !== 'string' : false;
}
function isStringArray(value) {
return !value.some(s => isNlsString(s));
}
function isNlsStringArray(value) {
return value.every(s => isNlsString(s));
}
var PolicyType;
(function (PolicyType) {
PolicyType[PolicyType["StringEnum"] = 0] = "StringEnum";
})(PolicyType || (PolicyType = {}));
function renderADMLString(prefix, moduleName, nlsString, translations) {
let value;
if (translations) {
const moduleTranslations = translations[moduleName];
if (moduleTranslations) {
value = moduleTranslations[nlsString.nlsKey];
}
}
if (!value) {
value = nlsString.value;
}
return `<string id="${prefix}_${nlsString.nlsKey}">${value}</string>`;
}
class BasePolicy {
constructor(policyType, name, category, minimumVersion, description, moduleName) {
this.policyType = policyType;
this.name = name;
this.category = category;
this.minimumVersion = minimumVersion;
this.description = description;
this.moduleName = moduleName;
}
renderADMLString(nlsString, translations) {
return renderADMLString(this.name, this.moduleName, nlsString, translations);
}
renderADMX(regKey) {
return [
`<policy name="${this.name}" class="Both" displayName="$(string.${this.name})" explainText="$(string.${this.name}_${this.description.nlsKey})" key="Software\\Policies\\Microsoft\\${regKey}" presentation="$(presentation.${this.name})">`,
` <parentCategory ref="${this.category.name.nlsKey}" />`,
` <supportedOn ref="Supported_${this.minimumVersion.replace(/\./g, '_')}" />`,
` <elements>`,
...this.renderADMXElements(),
` </elements>`,
`</policy>`
];
}
renderADMLStrings(translations) {
return [
`<string id="${this.name}">${this.name}</string>`,
this.renderADMLString(this.description, translations)
];
}
renderADMLPresentation() {
return `<presentation id="${this.name}">${this.renderADMLPresentationContents()}</presentation>`;
}
}
class BooleanPolicy extends BasePolicy {
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'boolean') {
return undefined;
}
return new BooleanPolicy(name, category, minimumVersion, description, moduleName);
}
constructor(name, category, minimumVersion, description, moduleName) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
}
renderADMXElements() {
return [
`<boolean id="${this.name}" valueName="${this.name}">`,
` <trueValue><decimal value="1" /></trueValue><falseValue><decimal value="0" /></falseValue>`,
`</boolean>`
];
}
renderADMLPresentationContents() {
return `<checkBox refId="${this.name}">${this.name}</checkBox>`;
}
}
class IntPolicy extends BasePolicy {
constructor(name, category, minimumVersion, description, moduleName, defaultValue) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.defaultValue = defaultValue;
}
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'number') {
return undefined;
}
const defaultValue = getIntProperty(settingNode, 'default');
if (typeof defaultValue === 'undefined') {
throw new Error(`Missing required 'default' property.`);
}
return new IntPolicy(name, category, minimumVersion, description, moduleName, defaultValue);
}
renderADMXElements() {
return [
`<decimal id="${this.name}" valueName="${this.name}" />`
// `<decimal id="Quarantine_PurgeItemsAfterDelay" valueName="PurgeItemsAfterDelay" minValue="0" maxValue="10000000" />`
];
}
renderADMLPresentationContents() {
return `<decimalTextBox refId="${this.name}" defaultValue="${this.defaultValue}">${this.name}</decimalTextBox>`;
}
}
class StringPolicy extends BasePolicy {
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'string') {
return undefined;
}
return new StringPolicy(name, category, minimumVersion, description, moduleName);
}
constructor(name, category, minimumVersion, description, moduleName) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
}
renderADMXElements() {
return [`<text id="${this.name}" valueName="${this.name}" required="true" />`];
}
renderADMLPresentationContents() {
return `<textBox refId="${this.name}"><label>${this.name}:</label></textBox>`;
}
}
class StringEnumPolicy extends BasePolicy {
constructor(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.enum_ = enum_;
this.enumDescriptions = enumDescriptions;
}
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'string') {
return undefined;
}
const enum_ = getStringArrayProperty(settingNode, 'enum');
if (!enum_) {
return undefined;
}
if (!isStringArray(enum_)) {
throw new Error(`Property 'enum' should not be localized.`);
}
const enumDescriptions = getStringArrayProperty(settingNode, 'enumDescriptions');
if (!enumDescriptions) {
throw new Error(`Missing required 'enumDescriptions' property.`);
}
else if (!isNlsStringArray(enumDescriptions)) {
throw new Error(`Property 'enumDescriptions' should be localized.`);
}
return new StringEnumPolicy(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions);
}
renderADMXElements() {
return [
`<enum id="${this.name}" valueName="${this.name}">`,
...this.enum_.map((value, index) => ` <item displayName="$(string.${this.name}_${this.enumDescriptions[index].nlsKey})"><value><string>${value}</string></value></item>`),
`</enum>`
];
}
renderADMLStrings(translations) {
return [
...super.renderADMLStrings(translations),
...this.enumDescriptions.map(e => this.renderADMLString(e, translations))
];
}
renderADMLPresentationContents() {
return `<dropdownList refId="${this.name}" />`;
}
}
const IntQ = {
Q: `(number) @value`,
value(matches) {
const match = matches[0];
if (!match) {
return undefined;
}
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
}
return parseInt(value);
}
};
const StringQ = {
Q: `[
(string (string_fragment) @value)
(call_expression function: (identifier) @localizeFn arguments: (arguments (string (string_fragment) @nlsKey) (string (string_fragment) @value)) (#eq? @localizeFn localize))
]`,
value(matches) {
const match = matches[0];
if (!match) {
return undefined;
}
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
}
const nlsKey = match.captures.filter(c => c.name === 'nlsKey')[0]?.node.text;
if (nlsKey) {
return { value, nlsKey };
}
else {
return value;
}
}
};
const StringArrayQ = {
Q: `(array ${StringQ.Q})`,
value(matches) {
if (matches.length === 0) {
return undefined;
}
return matches.map(match => {
return StringQ.value([match]);
});
}
};
function getProperty(qtype, node, key) {
const query = new Parser.Query(typescript, `(
(pair
key: [(property_identifier)(string)] @key
value: ${qtype.Q}
)
(#eq? @key ${key})
)`);
return qtype.value(query.matches(node));
}
function getIntProperty(node, key) {
return getProperty(IntQ, node, key);
}
function getStringProperty(node, key) {
return getProperty(StringQ, node, key);
}
function getStringArrayProperty(node, key) {
return getProperty(StringArrayQ, node, key);
}
// TODO: add more policy types
const PolicyTypes = [
BooleanPolicy,
IntPolicy,
StringEnumPolicy,
StringPolicy,
];
function getPolicy(moduleName, configurationNode, settingNode, policyNode, categories) {
const name = getStringProperty(policyNode, 'name');
if (!name) {
throw new Error(`Missing required 'name' property.`);
}
else if (isNlsString(name)) {
throw new Error(`Property 'name' should be a literal string.`);
}
const categoryName = getStringProperty(configurationNode, 'title');
if (!categoryName) {
throw new Error(`Missing required 'title' property.`);
}
else if (!isNlsString(categoryName)) {
throw new Error(`Property 'title' should be localized.`);
}
const categoryKey = `${categoryName.nlsKey}:${categoryName.value}`;
let category = categories.get(categoryKey);
if (!category) {
category = { moduleName, name: categoryName };
categories.set(categoryKey, category);
}
const minimumVersion = getStringProperty(policyNode, 'minimumVersion');
if (!minimumVersion) {
throw new Error(`Missing required 'minimumVersion' property.`);
}
else if (isNlsString(minimumVersion)) {
throw new Error(`Property 'minimumVersion' should be a literal string.`);
}
const description = getStringProperty(settingNode, 'description');
if (!description) {
throw new Error(`Missing required 'description' property.`);
}
if (!isNlsString(description)) {
throw new Error(`Property 'description' should be localized.`);
}
let result;
for (const policyType of PolicyTypes) {
if (result = policyType.from(name, category, minimumVersion, description, moduleName, settingNode)) {
break;
}
}
if (!result) {
throw new Error(`Failed to parse policy '${name}'.`);
}
return result;
}
function getPolicies(moduleName, node) {
const query = new Parser.Query(typescript, `
(
(call_expression
function: (member_expression property: (property_identifier) @registerConfigurationFn) (#eq? @registerConfigurationFn registerConfiguration)
arguments: (arguments (object (pair
key: [(property_identifier)(string)] @propertiesKey (#eq? @propertiesKey properties)
value: (object (pair
key: [(property_identifier)(string)]
value: (object (pair
key: [(property_identifier)(string)] @policyKey (#eq? @policyKey policy)
value: (object) @policy
)) @setting
))
)) @configuration)
)
)
`);
const categories = new Map();
return query.matches(node).map(m => {
const configurationNode = m.captures.filter(c => c.name === 'configuration')[0].node;
const settingNode = m.captures.filter(c => c.name === 'setting')[0].node;
const policyNode = m.captures.filter(c => c.name === 'policy')[0].node;
return getPolicy(moduleName, configurationNode, settingNode, policyNode, categories);
});
}
async function getFiles(root) {
return new Promise((c, e) => {
const result = [];
const rg = (0, child_process_1.spawn)(ripgrep_1.rgPath, ['-l', 'registerConfiguration\\(', '-g', 'src/**/*.ts', '-g', '!src/**/test/**', root]);
const stream = byline(rg.stdout.setEncoding('utf8'));
stream.on('data', path => result.push(path));
stream.on('error', err => e(err));
stream.on('end', () => c(result));
});
}
function renderADMX(regKey, versions, categories, policies) {
versions = versions.map(v => v.replace(/\./g, '_'));
return `<?xml version="1.0" encoding="utf-8"?>
<policyDefinitions revision="1.1" schemaVersion="1.0">
<policyNamespaces>
<target prefix="${regKey}" namespace="Microsoft.Policies.${regKey}" />
</policyNamespaces>
<resources minRequiredRevision="1.0" />
<supportedOn>
<definitions>
${versions.map(v => `<definition name="Supported_${v}" displayName="$(string.Supported_${v})" />`).join(`\n `)}
</definitions>
</supportedOn>
<categories>
<category displayName="$(string.Application)" name="Application" />
${categories.map(c => `<category displayName="$(string.Category_${c.name.nlsKey})" name="${c.name.nlsKey}"><parentCategory ref="Application" /></category>`).join(`\n `)}
</categories>
<policies>
${policies.map(p => p.renderADMX(regKey)).flat().join(`\n `)}
</policies>
</policyDefinitions>
`;
}
function renderADML(appName, versions, categories, policies, translations) {
return `<?xml version="1.0" encoding="utf-8"?>
<policyDefinitionResources revision="1.0" schemaVersion="1.0">
<displayName />
<description />
<resources>
<stringTable>
<string id="Application">${appName}</string>
${versions.map(v => `<string id="Supported_${v.replace(/\./g, '_')}">${appName} &gt;= ${v}</string>`)}
${categories.map(c => renderADMLString('Category', c.moduleName, c.name, translations))}
${policies.map(p => p.renderADMLStrings(translations)).flat().join(`\n `)}
</stringTable>
<presentationTable>
${policies.map(p => p.renderADMLPresentation()).join(`\n `)}
</presentationTable>
</resources>
</policyDefinitionResources>
`;
}
function renderGP(policies, translations) {
const appName = product.nameLong;
const regKey = product.win32RegValueName;
const versions = [...new Set(policies.map(p => p.minimumVersion)).values()].sort();
const categories = [...new Set(policies.map(p => p.category))];
return {
admx: renderADMX(regKey, versions, categories, policies),
adml: [
{ languageId: 'en-us', contents: renderADML(appName, versions, categories, policies) },
...translations.map(({ languageId, languageTranslations }) => ({ languageId, contents: renderADML(appName, versions, categories, policies, languageTranslations) }))
]
};
}
const Languages = {
'fr': 'fr-fr',
'it': 'it-it',
'de': 'de-de',
'es': 'es-es',
'ru': 'ru-ru',
'zh-hans': 'zh-cn',
'zh-hant': 'zh-tw',
'ja': 'ja-jp',
'ko': 'ko-kr',
'cs': 'cs-cz',
'pt-br': 'pt-br',
'tr': 'tr-tr',
'pl': 'pl-pl',
};
async function getLatestStableVersion(updateUrl) {
const res = await (0, node_fetch_1.default)(`${updateUrl}/api/update/darwin/stable/latest`);
const { name: version } = await res.json();
return version;
}
async function getSpecificNLS(resourceUrlTemplate, languageId, version) {
const resource = {
publisher: 'ms-ceintl',
name: `vscode-language-pack-${languageId}`,
version,
path: 'extension/translations/main.i18n.json'
};
const url = resourceUrlTemplate.replace(/\{([^}]+)\}/g, (_, key) => resource[key]);
const res = await (0, node_fetch_1.default)(url);
if (res.status !== 200) {
throw new Error(`[${res.status}] Error downloading language pack ${languageId}@${version}`);
}
const { contents: result } = await res.json();
return result;
}
function previousVersion(version) {
const [, major, minor, patch] = /^(\d+)\.(\d+)\.(\d+)$/.exec(version);
return `${major}.${parseInt(minor) - 1}.${patch}`;
}
async function getNLS(resourceUrlTemplate, languageId, version) {
try {
return await getSpecificNLS(resourceUrlTemplate, languageId, version);
}
catch (err) {
if (/\[404\]/.test(err.message)) {
console.warn(`Language pack ${languageId}@${version} is missing. Downloading previous version...`);
return await getSpecificNLS(resourceUrlTemplate, languageId, previousVersion(version));
}
else {
throw err;
}
}
}
async function parsePolicies() {
const parser = new Parser();
parser.setLanguage(typescript);
const files = await getFiles(process.cwd());
const base = path.join(process.cwd(), 'src');
const policies = [];
for (const file of files) {
const moduleName = path.relative(base, file).replace(/\.ts$/i, '').replace(/\\/g, '/');
const contents = await fs_1.promises.readFile(file, { encoding: 'utf8' });
const tree = parser.parse(contents);
policies.push(...getPolicies(moduleName, tree.rootNode));
}
return policies;
}
async function getTranslations() {
const updateUrl = product.updateUrl;
if (!updateUrl) {
console.warn(`Skipping policy localization: No 'updateUrl' found in 'product.json'.`);
return [];
}
const resourceUrlTemplate = product.extensionsGallery?.resourceUrlTemplate;
if (!resourceUrlTemplate) {
console.warn(`Skipping policy localization: No 'resourceUrlTemplate' found in 'product.json'.`);
return [];
}
const version = await getLatestStableVersion(updateUrl);
const languageIds = Object.keys(Languages);
return await Promise.all(languageIds.map(languageId => getNLS(resourceUrlTemplate, languageId, version)
.then(languageTranslations => ({ languageId, languageTranslations }))));
}
async function main() {
const [policies, translations] = await Promise.all([parsePolicies(), getTranslations()]);
const { admx, adml } = await renderGP(policies, translations);
const root = '.build/policies/win32';
await fs_1.promises.rm(root, { recursive: true, force: true });
await fs_1.promises.mkdir(root, { recursive: true });
await fs_1.promises.writeFile(path.join(root, `${product.win32RegValueName}.admx`), admx.replace(/\r?\n/g, '\n'));
for (const { languageId, contents } of adml) {
const languagePath = path.join(root, languageId === 'en-us' ? 'en-us' : Languages[languageId]);
await fs_1.promises.mkdir(languagePath, { recursive: true });
await fs_1.promises.writeFile(path.join(languagePath, `${product.win32RegValueName}.adml`), contents.replace(/\r?\n/g, '\n'));
}
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}

697
build/lib/policies.ts Normal file
View File

@@ -0,0 +1,697 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { spawn } from 'child_process';
import { promises as fs } from 'fs';
import * as path from 'path';
import * as byline from 'byline';
import { rgPath } from '@vscode/ripgrep';
import * as Parser from 'tree-sitter';
import fetch from 'node-fetch';
const { typescript } = require('tree-sitter-typescript');
const product = require('../../product.json');
type NlsString = { value: string; nlsKey: string };
function isNlsString(value: string | NlsString | undefined): value is NlsString {
return value ? typeof value !== 'string' : false;
}
function isStringArray(value: (string | NlsString)[]): value is string[] {
return !value.some(s => isNlsString(s));
}
function isNlsStringArray(value: (string | NlsString)[]): value is NlsString[] {
return value.every(s => isNlsString(s));
}
interface Category {
readonly moduleName: string;
readonly name: NlsString;
}
enum PolicyType {
StringEnum
}
interface Policy {
readonly category: Category;
readonly minimumVersion: string;
renderADMX(regKey: string): string[];
renderADMLStrings(translations?: LanguageTranslations): string[];
renderADMLPresentation(): string;
}
function renderADMLString(prefix: string, moduleName: string, nlsString: NlsString, translations?: LanguageTranslations): string {
let value: string | undefined;
if (translations) {
const moduleTranslations = translations[moduleName];
if (moduleTranslations) {
value = moduleTranslations[nlsString.nlsKey];
}
}
if (!value) {
value = nlsString.value;
}
return `<string id="${prefix}_${nlsString.nlsKey}">${value}</string>`;
}
abstract class BasePolicy implements Policy {
constructor(
protected policyType: PolicyType,
protected name: string,
readonly category: Category,
readonly minimumVersion: string,
protected description: NlsString,
protected moduleName: string,
) { }
protected renderADMLString(nlsString: NlsString, translations?: LanguageTranslations): string {
return renderADMLString(this.name, this.moduleName, nlsString, translations);
}
renderADMX(regKey: string) {
return [
`<policy name="${this.name}" class="Both" displayName="$(string.${this.name})" explainText="$(string.${this.name}_${this.description.nlsKey})" key="Software\\Policies\\Microsoft\\${regKey}" presentation="$(presentation.${this.name})">`,
` <parentCategory ref="${this.category.name.nlsKey}" />`,
` <supportedOn ref="Supported_${this.minimumVersion.replace(/\./g, '_')}" />`,
` <elements>`,
...this.renderADMXElements(),
` </elements>`,
`</policy>`
];
}
protected abstract renderADMXElements(): string[];
renderADMLStrings(translations?: LanguageTranslations) {
return [
`<string id="${this.name}">${this.name}</string>`,
this.renderADMLString(this.description, translations)
];
}
renderADMLPresentation(): string {
return `<presentation id="${this.name}">${this.renderADMLPresentationContents()}</presentation>`;
}
protected abstract renderADMLPresentationContents(): string;
}
class BooleanPolicy extends BasePolicy {
static from(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
settingNode: Parser.SyntaxNode
): BooleanPolicy | undefined {
const type = getStringProperty(settingNode, 'type');
if (type !== 'boolean') {
return undefined;
}
return new BooleanPolicy(name, category, minimumVersion, description, moduleName);
}
private constructor(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
}
protected renderADMXElements(): string[] {
return [
`<boolean id="${this.name}" valueName="${this.name}">`,
` <trueValue><decimal value="1" /></trueValue><falseValue><decimal value="0" /></falseValue>`,
`</boolean>`
];
}
renderADMLPresentationContents() {
return `<checkBox refId="${this.name}">${this.name}</checkBox>`;
}
}
class IntPolicy extends BasePolicy {
static from(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
settingNode: Parser.SyntaxNode
): IntPolicy | undefined {
const type = getStringProperty(settingNode, 'type');
if (type !== 'number') {
return undefined;
}
const defaultValue = getIntProperty(settingNode, 'default');
if (typeof defaultValue === 'undefined') {
throw new Error(`Missing required 'default' property.`);
}
return new IntPolicy(name, category, minimumVersion, description, moduleName, defaultValue);
}
private constructor(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
protected readonly defaultValue: number,
) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
}
protected renderADMXElements(): string[] {
return [
`<decimal id="${this.name}" valueName="${this.name}" />`
// `<decimal id="Quarantine_PurgeItemsAfterDelay" valueName="PurgeItemsAfterDelay" minValue="0" maxValue="10000000" />`
];
}
renderADMLPresentationContents() {
return `<decimalTextBox refId="${this.name}" defaultValue="${this.defaultValue}">${this.name}</decimalTextBox>`;
}
}
class StringPolicy extends BasePolicy {
static from(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
settingNode: Parser.SyntaxNode
): StringPolicy | undefined {
const type = getStringProperty(settingNode, 'type');
if (type !== 'string') {
return undefined;
}
return new StringPolicy(name, category, minimumVersion, description, moduleName);
}
private constructor(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
}
protected renderADMXElements(): string[] {
return [`<text id="${this.name}" valueName="${this.name}" required="true" />`];
}
renderADMLPresentationContents() {
return `<textBox refId="${this.name}"><label>${this.name}:</label></textBox>`;
}
}
class StringEnumPolicy extends BasePolicy {
static from(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
settingNode: Parser.SyntaxNode
): StringEnumPolicy | undefined {
const type = getStringProperty(settingNode, 'type');
if (type !== 'string') {
return undefined;
}
const enum_ = getStringArrayProperty(settingNode, 'enum');
if (!enum_) {
return undefined;
}
if (!isStringArray(enum_)) {
throw new Error(`Property 'enum' should not be localized.`);
}
const enumDescriptions = getStringArrayProperty(settingNode, 'enumDescriptions');
if (!enumDescriptions) {
throw new Error(`Missing required 'enumDescriptions' property.`);
} else if (!isNlsStringArray(enumDescriptions)) {
throw new Error(`Property 'enumDescriptions' should be localized.`);
}
return new StringEnumPolicy(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions);
}
private constructor(
name: string,
category: Category,
minimumVersion: string,
description: NlsString,
moduleName: string,
protected enum_: string[],
protected enumDescriptions: NlsString[],
) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
}
protected renderADMXElements(): string[] {
return [
`<enum id="${this.name}" valueName="${this.name}">`,
...this.enum_.map((value, index) => ` <item displayName="$(string.${this.name}_${this.enumDescriptions[index].nlsKey})"><value><string>${value}</string></value></item>`),
`</enum>`
];
}
renderADMLStrings(translations?: LanguageTranslations) {
return [
...super.renderADMLStrings(translations),
...this.enumDescriptions.map(e => this.renderADMLString(e, translations))
];
}
renderADMLPresentationContents() {
return `<dropdownList refId="${this.name}" />`;
}
}
interface QType<T> {
Q: string;
value(matches: Parser.QueryMatch[]): T | undefined;
}
const IntQ: QType<number> = {
Q: `(number) @value`,
value(matches: Parser.QueryMatch[]): number | undefined {
const match = matches[0];
if (!match) {
return undefined;
}
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
}
return parseInt(value);
}
};
const StringQ: QType<string | NlsString> = {
Q: `[
(string (string_fragment) @value)
(call_expression function: (identifier) @localizeFn arguments: (arguments (string (string_fragment) @nlsKey) (string (string_fragment) @value)) (#eq? @localizeFn localize))
]`,
value(matches: Parser.QueryMatch[]): string | NlsString | undefined {
const match = matches[0];
if (!match) {
return undefined;
}
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
}
const nlsKey = match.captures.filter(c => c.name === 'nlsKey')[0]?.node.text;
if (nlsKey) {
return { value, nlsKey };
} else {
return value;
}
}
};
const StringArrayQ: QType<(string | NlsString)[]> = {
Q: `(array ${StringQ.Q})`,
value(matches: Parser.QueryMatch[]): (string | NlsString)[] | undefined {
if (matches.length === 0) {
return undefined;
}
return matches.map(match => {
return StringQ.value([match]) as string | NlsString;
});
}
};
function getProperty<T>(qtype: QType<T>, node: Parser.SyntaxNode, key: string): T | undefined {
const query = new Parser.Query(
typescript,
`(
(pair
key: [(property_identifier)(string)] @key
value: ${qtype.Q}
)
(#eq? @key ${key})
)`
);
return qtype.value(query.matches(node));
}
function getIntProperty(node: Parser.SyntaxNode, key: string): number | undefined {
return getProperty(IntQ, node, key);
}
function getStringProperty(node: Parser.SyntaxNode, key: string): string | NlsString | undefined {
return getProperty(StringQ, node, key);
}
function getStringArrayProperty(node: Parser.SyntaxNode, key: string): (string | NlsString)[] | undefined {
return getProperty(StringArrayQ, node, key);
}
// TODO: add more policy types
const PolicyTypes = [
BooleanPolicy,
IntPolicy,
StringEnumPolicy,
StringPolicy,
];
function getPolicy(
moduleName: string,
configurationNode: Parser.SyntaxNode,
settingNode: Parser.SyntaxNode,
policyNode: Parser.SyntaxNode,
categories: Map<string, Category>
): Policy {
const name = getStringProperty(policyNode, 'name');
if (!name) {
throw new Error(`Missing required 'name' property.`);
} else if (isNlsString(name)) {
throw new Error(`Property 'name' should be a literal string.`);
}
const categoryName = getStringProperty(configurationNode, 'title');
if (!categoryName) {
throw new Error(`Missing required 'title' property.`);
} else if (!isNlsString(categoryName)) {
throw new Error(`Property 'title' should be localized.`);
}
const categoryKey = `${categoryName.nlsKey}:${categoryName.value}`;
let category = categories.get(categoryKey);
if (!category) {
category = { moduleName, name: categoryName };
categories.set(categoryKey, category);
}
const minimumVersion = getStringProperty(policyNode, 'minimumVersion');
if (!minimumVersion) {
throw new Error(`Missing required 'minimumVersion' property.`);
} else if (isNlsString(minimumVersion)) {
throw new Error(`Property 'minimumVersion' should be a literal string.`);
}
const description = getStringProperty(settingNode, 'description');
if (!description) {
throw new Error(`Missing required 'description' property.`);
}
if (!isNlsString(description)) {
throw new Error(`Property 'description' should be localized.`);
}
let result: Policy | undefined;
for (const policyType of PolicyTypes) {
if (result = policyType.from(name, category, minimumVersion, description, moduleName, settingNode)) {
break;
}
}
if (!result) {
throw new Error(`Failed to parse policy '${name}'.`);
}
return result;
}
function getPolicies(moduleName: string, node: Parser.SyntaxNode): Policy[] {
const query = new Parser.Query(typescript, `
(
(call_expression
function: (member_expression property: (property_identifier) @registerConfigurationFn) (#eq? @registerConfigurationFn registerConfiguration)
arguments: (arguments (object (pair
key: [(property_identifier)(string)] @propertiesKey (#eq? @propertiesKey properties)
value: (object (pair
key: [(property_identifier)(string)]
value: (object (pair
key: [(property_identifier)(string)] @policyKey (#eq? @policyKey policy)
value: (object) @policy
)) @setting
))
)) @configuration)
)
)
`);
const categories = new Map<string, Category>();
return query.matches(node).map(m => {
const configurationNode = m.captures.filter(c => c.name === 'configuration')[0].node;
const settingNode = m.captures.filter(c => c.name === 'setting')[0].node;
const policyNode = m.captures.filter(c => c.name === 'policy')[0].node;
return getPolicy(moduleName, configurationNode, settingNode, policyNode, categories);
});
}
async function getFiles(root: string): Promise<string[]> {
return new Promise((c, e) => {
const result: string[] = [];
const rg = spawn(rgPath, ['-l', 'registerConfiguration\\(', '-g', 'src/**/*.ts', '-g', '!src/**/test/**', root]);
const stream = byline(rg.stdout.setEncoding('utf8'));
stream.on('data', path => result.push(path));
stream.on('error', err => e(err));
stream.on('end', () => c(result));
});
}
function renderADMX(regKey: string, versions: string[], categories: Category[], policies: Policy[]) {
versions = versions.map(v => v.replace(/\./g, '_'));
return `<?xml version="1.0" encoding="utf-8"?>
<policyDefinitions revision="1.1" schemaVersion="1.0">
<policyNamespaces>
<target prefix="${regKey}" namespace="Microsoft.Policies.${regKey}" />
</policyNamespaces>
<resources minRequiredRevision="1.0" />
<supportedOn>
<definitions>
${versions.map(v => `<definition name="Supported_${v}" displayName="$(string.Supported_${v})" />`).join(`\n `)}
</definitions>
</supportedOn>
<categories>
<category displayName="$(string.Application)" name="Application" />
${categories.map(c => `<category displayName="$(string.Category_${c.name.nlsKey})" name="${c.name.nlsKey}"><parentCategory ref="Application" /></category>`).join(`\n `)}
</categories>
<policies>
${policies.map(p => p.renderADMX(regKey)).flat().join(`\n `)}
</policies>
</policyDefinitions>
`;
}
function renderADML(appName: string, versions: string[], categories: Category[], policies: Policy[], translations?: LanguageTranslations) {
return `<?xml version="1.0" encoding="utf-8"?>
<policyDefinitionResources revision="1.0" schemaVersion="1.0">
<displayName />
<description />
<resources>
<stringTable>
<string id="Application">${appName}</string>
${versions.map(v => `<string id="Supported_${v.replace(/\./g, '_')}">${appName} &gt;= ${v}</string>`)}
${categories.map(c => renderADMLString('Category', c.moduleName, c.name, translations))}
${policies.map(p => p.renderADMLStrings(translations)).flat().join(`\n `)}
</stringTable>
<presentationTable>
${policies.map(p => p.renderADMLPresentation()).join(`\n `)}
</presentationTable>
</resources>
</policyDefinitionResources>
`;
}
function renderGP(policies: Policy[], translations: Translations) {
const appName = product.nameLong;
const regKey = product.win32RegValueName;
const versions = [...new Set(policies.map(p => p.minimumVersion)).values()].sort();
const categories = [...new Set(policies.map(p => p.category))];
return {
admx: renderADMX(regKey, versions, categories, policies),
adml: [
{ languageId: 'en-us', contents: renderADML(appName, versions, categories, policies) },
...translations.map(({ languageId, languageTranslations }) =>
({ languageId, contents: renderADML(appName, versions, categories, policies, languageTranslations) }))
]
};
}
const Languages = {
'fr': 'fr-fr',
'it': 'it-it',
'de': 'de-de',
'es': 'es-es',
'ru': 'ru-ru',
'zh-hans': 'zh-cn',
'zh-hant': 'zh-tw',
'ja': 'ja-jp',
'ko': 'ko-kr',
'cs': 'cs-cz',
'pt-br': 'pt-br',
'tr': 'tr-tr',
'pl': 'pl-pl',
};
type LanguageTranslations = { [moduleName: string]: { [nlsKey: string]: string } };
type Translations = { languageId: string; languageTranslations: LanguageTranslations }[];
async function getLatestStableVersion(updateUrl: string) {
const res = await fetch(`${updateUrl}/api/update/darwin/stable/latest`);
const { name: version } = await res.json() as { name: string };
return version;
}
async function getSpecificNLS(resourceUrlTemplate: string, languageId: string, version: string) {
const resource = {
publisher: 'ms-ceintl',
name: `vscode-language-pack-${languageId}`,
version,
path: 'extension/translations/main.i18n.json'
};
const url = resourceUrlTemplate.replace(/\{([^}]+)\}/g, (_, key) => resource[key as keyof typeof resource]);
const res = await fetch(url);
if (res.status !== 200) {
throw new Error(`[${res.status}] Error downloading language pack ${languageId}@${version}`);
}
const { contents: result } = await res.json() as { contents: LanguageTranslations };
return result;
}
function previousVersion(version: string): string {
const [, major, minor, patch] = /^(\d+)\.(\d+)\.(\d+)$/.exec(version)!;
return `${major}.${parseInt(minor) - 1}.${patch}`;
}
async function getNLS(resourceUrlTemplate: string, languageId: string, version: string) {
try {
return await getSpecificNLS(resourceUrlTemplate, languageId, version);
} catch (err) {
if (/\[404\]/.test(err.message)) {
console.warn(`Language pack ${languageId}@${version} is missing. Downloading previous version...`);
return await getSpecificNLS(resourceUrlTemplate, languageId, previousVersion(version));
} else {
throw err;
}
}
}
async function parsePolicies(): Promise<Policy[]> {
const parser = new Parser();
parser.setLanguage(typescript);
const files = await getFiles(process.cwd());
const base = path.join(process.cwd(), 'src');
const policies = [];
for (const file of files) {
const moduleName = path.relative(base, file).replace(/\.ts$/i, '').replace(/\\/g, '/');
const contents = await fs.readFile(file, { encoding: 'utf8' });
const tree = parser.parse(contents);
policies.push(...getPolicies(moduleName, tree.rootNode));
}
return policies;
}
async function getTranslations(): Promise<Translations> {
const updateUrl = product.updateUrl;
if (!updateUrl) {
console.warn(`Skipping policy localization: No 'updateUrl' found in 'product.json'.`);
return [];
}
const resourceUrlTemplate = product.extensionsGallery?.resourceUrlTemplate;
if (!resourceUrlTemplate) {
console.warn(`Skipping policy localization: No 'resourceUrlTemplate' found in 'product.json'.`);
return [];
}
const version = await getLatestStableVersion(updateUrl);
const languageIds = Object.keys(Languages);
return await Promise.all(languageIds.map(
languageId => getNLS(resourceUrlTemplate, languageId, version)
.then(languageTranslations => ({ languageId, languageTranslations }))
));
}
async function main() {
const [policies, translations] = await Promise.all([parsePolicies(), getTranslations()]);
const { admx, adml } = await renderGP(policies, translations);
const root = '.build/policies/win32';
await fs.rm(root, { recursive: true, force: true });
await fs.mkdir(root, { recursive: true });
await fs.writeFile(path.join(root, `${product.win32RegValueName}.admx`), admx.replace(/\r?\n/g, '\n'));
for (const { languageId, contents } of adml) {
const languagePath = path.join(root, languageId === 'en-us' ? 'en-us' : Languages[languageId as keyof typeof Languages]);
await fs.mkdir(languagePath, { recursive: true });
await fs.writeFile(path.join(languagePath, `${product.win32RegValueName}.adml`), contents.replace(/\r?\n/g, '\n'));
}
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}
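The policies script above discovers Windows group policy definitions by running tree-sitter queries over `registerConfiguration(...)` calls: a setting qualifies when it carries a `policy` object with a literal `name` and `minimumVersion`, a localized `description`, and a `type` that one of `BooleanPolicy`, `IntPolicy`, `StringEnumPolicy` or `StringPolicy` can handle. Below is a minimal sketch of such a setting; the registry object, setting key, policy name and version are hypothetical placeholders and are not part of this commit.
// Illustrative sketch only — `configurationRegistry`, 'update.mode', 'UpdateMode'
// and '1.67' are hypothetical placeholders, not code from this commit.
configurationRegistry.registerConfiguration({
	id: 'update',
	title: localize('updateConfigurationTitle', "Update"),
	type: 'object',
	properties: {
		'update.mode': {
			type: 'string',
			enum: ['none', 'manual', 'default'],
			enumDescriptions: [
				localize('none', "Disable update checks."),
				localize('manual', "Check for updates only on demand."),
				localize('default', "Check for updates automatically.")
			],
			description: localize('updateMode', "Configure whether you receive automatic updates."),
			policy: {
				name: 'UpdateMode',
				minimumVersion: '1.67'
			}
		}
	}
});
Given a setting of this shape, `StringEnumPolicy.from` would accept it, and `main()` would write the corresponding `<enum>` ADMX element and localized ADML strings under `.build/policies/win32`.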

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
// @ts-check
const path = require("path");

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
// @ts-check
import * as path from 'path';

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.createReporter = void 0;
const es = require("event-stream");

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as es from 'event-stream';
import * as _ from 'underscore';
import * as fancyLog from 'fancy-log';

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var snaps;
(function (snaps) {
const fs = require('fs');

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
namespace snaps {
const fs = require('fs');

View File

@@ -10,7 +10,7 @@ const path = require("path");
const tss = require("./treeshaking");
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
let dirCache = {};
const dirCache = {};
function writeFile(filePath, contents) {
function ensureDirs(dirPath) {
if (dirCache[dirPath]) {
@@ -54,13 +54,13 @@ function extractEditor(options) {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});
}
let result = tss.shake(options);
for (let fileName in result) {
const result = tss.shake(options);
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied = {};
const copied = {};
const copyFile = (fileName) => {
if (copied[fileName]) {
return;
@@ -73,7 +73,7 @@ function extractEditor(options) {
const writeOutputFile = (fileName, contents) => {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName];
const info = ts.preProcessFile(fileContents);
@@ -103,13 +103,12 @@ function extractEditor(options) {
delete tsConfig.compilerOptions.moduleResolution;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[
'vs/css.build.js',
'vs/css.d.ts',
'vs/css.js',
'vs/css.build.ts',
'vs/css.ts',
'vs/loader.js',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/nls.js',
'vs/loader.d.ts',
'vs/nls.build.ts',
'vs/nls.ts',
'vs/nls.mock.ts',
].forEach(copyFile);
}
@@ -120,7 +119,7 @@ function createESMSourcesAndResources2(options) {
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file) => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
const dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
}
@@ -194,7 +193,7 @@ function createESMSourcesAndResources2(options) {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result = [];
const result = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
@@ -216,7 +215,7 @@ function createESMSourcesAndResources2(options) {
}
writeFile(absoluteFilePath, contents);
function toggleComments(fileContents) {
let lines = fileContents.split(/\r\n|\r|\n/);
const lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
@@ -279,14 +278,14 @@ function transportCSS(module, enqueue, write) {
let DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
const newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
const encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}

View File

@@ -10,7 +10,7 @@ import * as tss from './treeshaking';
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
let dirCache: { [dir: string]: boolean } = {};
const dirCache: { [dir: string]: boolean } = {};
function writeFile(filePath: string, contents: Buffer | string): void {
function ensureDirs(dirPath: string): void {
@@ -63,13 +63,13 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
});
}
let result = tss.shake(options);
for (let fileName in result) {
const result = tss.shake(options);
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied: { [fileName: string]: boolean } = {};
const copied: { [fileName: string]: boolean } = {};
const copyFile = (fileName: string) => {
if (copied[fileName]) {
return;
@@ -82,7 +82,7 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
const writeOutputFile = (fileName: string, contents: string | Buffer) => {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
for (const fileName in result) {
if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName];
const info = ts.preProcessFile(fileContents);
@@ -115,13 +115,12 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[
'vs/css.build.js',
'vs/css.d.ts',
'vs/css.js',
'vs/css.build.ts',
'vs/css.ts',
'vs/loader.js',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/nls.js',
'vs/loader.d.ts',
'vs/nls.build.ts',
'vs/nls.ts',
'vs/nls.mock.ts',
].forEach(copyFile);
}
@@ -142,7 +141,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file: string): string => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
const dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
}
@@ -229,7 +228,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result: string[] = [];
const result: string[] = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
@@ -253,7 +252,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
writeFile(absoluteFilePath, contents);
function toggleComments(fileContents: string): string {
let lines = fileContents.split(/\r\n|\r|\n/);
const lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
@@ -325,14 +324,14 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
const newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
const encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.define = exports.parallel = exports.series = void 0;
const fancyLog = require("fancy-log");

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.shake = exports.toStringShakeLevel = exports.ShakeLevel = void 0;
const fs = require("fs");
@@ -32,7 +32,7 @@ function printDiagnostics(options, diagnostics) {
result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
const location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `:${location.line + 1}:${location.character}`;
}
result += ` - ` + JSON.stringify(diag.messageText);
@@ -89,6 +89,8 @@ function discoverAndReadFiles(ts, options) {
const in_queue = Object.create(null);
const queue = [];
const enqueue = (moduleId) => {
// To make the treeshaker work on windows...
moduleId = moduleId.replace(/\\/g, '/');
if (in_queue[moduleId]) {
return;
}
@@ -150,7 +152,7 @@ function processLibFiles(ts, options) {
result[key] = sourceText;
// process dependencies and "recurse"
const info = ts.preProcessFile(sourceText);
for (let ref of info.libReferenceDirectives) {
for (const ref of info.libReferenceDirectives) {
stack.push(ref.fileName);
}
}
@@ -226,6 +228,12 @@ function getColor(node) {
function setColor(node, color) {
node.$$$color = color;
}
function markNeededSourceFile(node) {
node.$$$neededSourceFile = true;
}
function isNeededSourceFile(node) {
return Boolean(node.$$$neededSourceFile);
}
function nodeOrParentIsBlack(node) {
while (node) {
const color = getColor(node);
@@ -351,6 +359,19 @@ function markNodes(ts, languageService, options) {
}
});
}
/**
* Return the parent of `node` which is an ImportDeclaration
*/
function findParentImportDeclaration(node) {
let _node = node;
do {
if (ts.isImportDeclaration(_node)) {
return _node;
}
_node = _node.parent;
} while (_node);
return null;
}
function enqueue_gray(node) {
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* Gray */) {
return;
@@ -418,6 +439,8 @@ function markNodes(ts, languageService, options) {
console.warn(`Cannot find source file ${filename}`);
return;
}
// This source file should survive even if it is empty
markNeededSourceFile(sourceFile);
enqueue_black(sourceFile);
}
function enqueueImport(node, importText) {
@@ -469,7 +492,11 @@ function markNodes(ts, languageService, options) {
const loop = (node) => {
const [symbol, symbolImportNode] = getRealNodeSymbol(ts, checker, node);
if (symbolImportNode) {
setColor(symbolImportNode, 2 /* Black */);
setColor(symbolImportNode, 2 /* NodeColor.Black */);
const importDeclarationNode = findParentImportDeclaration(symbolImportNode);
if (importDeclarationNode && ts.isStringLiteral(importDeclarationNode.moduleSpecifier)) {
enqueueImport(importDeclarationNode, importDeclarationNode.moduleSpecifier.text);
}
}
if (isSymbolWithDeclarations(symbol) && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
for (let i = 0, len = symbol.declarations.length; i < len; i++) { // {{SQL CARBON EDIT}} Compile fixes
@@ -503,7 +530,7 @@ function markNodes(ts, languageService, options) {
}
// queue the heritage clauses
if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) {
for (const heritageClause of declaration.heritageClauses) {
enqueue_black(heritageClause);
}
}
@@ -551,7 +578,7 @@ function generateResult(ts, languageService, shakeLevel) {
if (!program) {
throw new Error('Could not get program from language service');
}
let result = {};
const result = {};
const writeFile = (filePath, contents) => {
result[filePath] = contents;
};
@@ -567,7 +594,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
return;
}
let text = sourceFile.text;
const text = sourceFile.text;
let result = '';
function keep(node) {
result += text.substring(node.pos, node.end);
@@ -597,7 +624,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
}
else {
let survivingImports = [];
const survivingImports = [];
for (const importNode of node.importClause.namedBindings.elements) {
if (getColor(importNode) === 2 /* Black */) {
survivingImports.push(importNode.getFullText(sourceFile));
@@ -626,7 +653,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
if (ts.isExportDeclaration(node)) {
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
let survivingExports = [];
const survivingExports = [];
for (const exportSpecifier of node.exportClause.elements) {
if (getColor(exportSpecifier) === 2 /* Black */) {
survivingExports.push(exportSpecifier.getFullText(sourceFile));
@@ -647,8 +674,8 @@ function generateResult(ts, languageService, shakeLevel) {
// keep method
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
const pos = member.pos - node.pos;
const end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);
@@ -661,11 +688,23 @@ function generateResult(ts, languageService, shakeLevel) {
}
if (getColor(sourceFile) !== 2 /* Black */) {
if (!nodeOrChildIsBlack(sourceFile)) {
// none of the elements are reachable => don't write this file at all!
return;
// none of the elements are reachable
if (isNeededSourceFile(sourceFile)) {
// this source file must be written, even if nothing is used from it
// because there is an import somewhere for it.
// However, TS complains about empty files with the error "x" is not a module,
// so we will export a dummy variable
result = 'export const __dummy = 0;';
}
else {
// don't write this file at all!
return;
}
}
else {
sourceFile.forEachChild(writeMarkedNodes);
result += sourceFile.endOfFileToken.getFullText(sourceFile);
}
sourceFile.forEachChild(writeMarkedNodes);
result += sourceFile.endOfFileToken.getFullText(sourceFile);
}
else {
result = text;
@@ -839,3 +878,4 @@ function getTokenAtPosition(ts, sourceFile, position, allowPositionInLeadingTriv
return current;
}
}
//#endregion

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import type * as ts from 'typescript';
@@ -73,7 +71,7 @@ function printDiagnostics(options: ITreeShakingOptions, diagnostics: ReadonlyArr
result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
const location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `:${location.line + 1}:${location.character}`;
}
result += ` - ` + JSON.stringify(diag.messageText);
@@ -144,6 +142,8 @@ function discoverAndReadFiles(ts: typeof import('typescript'), options: ITreeSha
const queue: string[] = [];
const enqueue = (moduleId: string) => {
// To make the treeshaker work on windows...
moduleId = moduleId.replace(/\\/g, '/');
if (in_queue[moduleId]) {
return;
}
@@ -216,7 +216,7 @@ function processLibFiles(ts: typeof import('typescript'), options: ITreeShakingO
// process dependencies and "recurse"
const info = ts.preProcessFile(sourceText);
for (let ref of info.libReferenceDirectives) {
for (const ref of info.libReferenceDirectives) {
stack.push(ref.fileName);
}
}
@@ -307,6 +307,12 @@ function getColor(node: ts.Node): NodeColor {
function setColor(node: ts.Node, color: NodeColor): void {
(<any>node).$$$color = color;
}
function markNeededSourceFile(node: ts.SourceFile): void {
(<any>node).$$$neededSourceFile = true;
}
function isNeededSourceFile(node: ts.SourceFile): boolean {
return Boolean((<any>node).$$$neededSourceFile);
}
function nodeOrParentIsBlack(node: ts.Node): boolean {
while (node) {
const color = getColor(node);
@@ -449,6 +455,20 @@ function markNodes(ts: typeof import('typescript'), languageService: ts.Language
});
}
/**
* Return the parent of `node` which is an ImportDeclaration
*/
function findParentImportDeclaration(node: ts.Declaration): ts.ImportDeclaration | null {
let _node: ts.Node = node;
do {
if (ts.isImportDeclaration(_node)) {
return _node;
}
_node = _node.parent;
} while (_node);
return null;
}
function enqueue_gray(node: ts.Node): void {
if (nodeOrParentIsBlack(node) || getColor(node) === NodeColor.Gray) {
return;
@@ -531,6 +551,8 @@ function markNodes(ts: typeof import('typescript'), languageService: ts.Language
console.warn(`Cannot find source file ${filename}`);
return;
}
// This source file should survive even if it is empty
markNeededSourceFile(sourceFile);
enqueue_black(sourceFile);
}
@@ -590,6 +612,10 @@ function markNodes(ts: typeof import('typescript'), languageService: ts.Language
const [symbol, symbolImportNode] = getRealNodeSymbol(ts, checker, node);
if (symbolImportNode) {
setColor(symbolImportNode, NodeColor.Black);
const importDeclarationNode = findParentImportDeclaration(symbolImportNode);
if (importDeclarationNode && ts.isStringLiteral(importDeclarationNode.moduleSpecifier)) {
enqueueImport(importDeclarationNode, importDeclarationNode.moduleSpecifier.text);
}
}
if (isSymbolWithDeclarations(symbol) && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
@@ -629,7 +655,7 @@ function markNodes(ts: typeof import('typescript'), languageService: ts.Language
// queue the heritage clauses
if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) {
for (const heritageClause of declaration.heritageClauses) {
enqueue_black(heritageClause);
}
}
@@ -682,7 +708,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
throw new Error('Could not get program from language service');
}
let result: ITreeShakingResult = {};
const result: ITreeShakingResult = {};
const writeFile = (filePath: string, contents: string): void => {
result[filePath] = contents;
};
@@ -700,7 +726,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
return;
}
let text = sourceFile.text;
const text = sourceFile.text;
let result = '';
function keep(node: ts.Node): void {
@@ -734,7 +760,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
return keep(node);
}
} else {
let survivingImports: string[] = [];
const survivingImports: string[] = [];
for (const importNode of node.importClause.namedBindings.elements) {
if (getColor(importNode) === NodeColor.Black) {
survivingImports.push(importNode.getFullText(sourceFile));
@@ -762,7 +788,7 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
if (ts.isExportDeclaration(node)) {
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
let survivingExports: string[] = [];
const survivingExports: string[] = [];
for (const exportSpecifier of node.exportClause.elements) {
if (getColor(exportSpecifier) === NodeColor.Black) {
survivingExports.push(exportSpecifier.getFullText(sourceFile));
@@ -785,8 +811,8 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
const pos = member.pos - node.pos;
const end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);
@@ -802,11 +828,21 @@ function generateResult(ts: typeof import('typescript'), languageService: ts.Lan
if (getColor(sourceFile) !== NodeColor.Black) {
if (!nodeOrChildIsBlack(sourceFile)) {
// none of the elements are reachable => don't write this file at all!
return;
// none of the elements are reachable
if (isNeededSourceFile(sourceFile)) {
// this source file must be written, even if nothing is used from it
// because there is an import somewhere for it.
// However, TS complains about empty files with the error "x" is not a module,
// so we will export a dummy variable
result = 'export const __dummy = 0;';
} else {
// don't write this file at all!
return;
}
} else {
sourceFile.forEachChild(writeMarkedNodes);
result += sourceFile.endOfFileToken.getFullText(sourceFile);
}
sourceFile.forEachChild(writeMarkedNodes);
result += sourceFile.endOfFileToken.getFullText(sourceFile);
} else {
result = text;
}
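The treeshaking change above keeps a source file alive when an import still points at it even though none of its own nodes survive the shake. A minimal standalone sketch of that emit decision follows, using hypothetical names (`ShakenFile`, `emitShakenFile`) that mirror `isNeededSourceFile` and `nodeOrChildIsBlack`; it is a restatement of the rule, not code from this commit.
// Illustrative sketch only — these names are hypothetical restatements of the rule above.
interface ShakenFile {
	text: string;                // full source text of the file
	hasReachableNodes: boolean;  // nodeOrChildIsBlack(sourceFile)
	isNeeded: boolean;           // isNeededSourceFile(sourceFile): some import references it
}

function emitShakenFile(file: ShakenFile): string | undefined {
	if (!file.hasReachableNodes) {
		if (file.isNeeded) {
			// TypeScript rejects an empty emitted file with "'x' is not a module",
			// so a dummy export keeps the importing module compiling.
			return 'export const __dummy = 0;';
		}
		// Nothing reachable and nothing imports it: drop the file entirely.
		return undefined;
	}
	// Otherwise the surviving text is written (simplified here; the real code walks the marked nodes).
	return file.text;
}
For example, `emitShakenFile({ text: 'export function f() {}', hasReachableNodes: false, isNeeded: true })` yields the `export const __dummy = 0;` stub instead of dropping the file.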

491
build/lib/tsb/builder.js Normal file
View File

@@ -0,0 +1,491 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.createTypeScriptBuilder = exports.CancellationToken = void 0;
const fs_1 = require("fs");
const path = require("path");
const crypto = require("crypto");
const utils = require("./utils");
const colors = require("ansi-colors");
const ts = require("typescript");
const Vinyl = require("vinyl");
var CancellationToken;
(function (CancellationToken) {
CancellationToken.None = {
isCancellationRequested() { return false; }
};
})(CancellationToken = exports.CancellationToken || (exports.CancellationToken = {}));
function normalize(path) {
return path.replace(/\\/g, '/');
}
function createTypeScriptBuilder(config, projectFile, cmd) {
const _log = config.logFn;
const host = new LanguageServiceHost(cmd, projectFile, _log);
const service = ts.createLanguageService(host, ts.createDocumentRegistry());
const lastBuildVersion = Object.create(null);
const lastDtsHash = Object.create(null);
const userWantsDeclarations = cmd.options.declaration;
let oldErrors = Object.create(null);
let headUsed = process.memoryUsage().heapUsed;
let emitSourceMapsInStream = true;
// always emit declaration files
host.getCompilationSettings().declaration = true;
function file(file) {
// support gulp-sourcemaps
if (file.sourceMap) {
emitSourceMapsInStream = false;
}
if (!file.contents) {
host.removeScriptSnapshot(file.path);
}
else {
host.addScriptSnapshot(file.path, new VinylScriptSnapshot(file));
}
}
function baseFor(snapshot) {
if (snapshot instanceof VinylScriptSnapshot) {
return cmd.options.outDir || snapshot.getBase();
}
else {
return '';
}
}
function isExternalModule(sourceFile) {
return sourceFile.externalModuleIndicator
|| /declare\s+module\s+('|")(.+)\1/.test(sourceFile.getText());
}
function build(out, onError, token = CancellationToken.None) {
function checkSyntaxSoon(fileName) {
return new Promise(resolve => {
process.nextTick(function () {
if (!host.getScriptSnapshot(fileName, false)) {
resolve([]); // no script, no problems
}
else {
resolve(service.getSyntacticDiagnostics(fileName));
}
});
});
}
function checkSemanticsSoon(fileName) {
return new Promise(resolve => {
process.nextTick(function () {
if (!host.getScriptSnapshot(fileName, false)) {
resolve([]); // no script, no problems
}
else {
resolve(service.getSemanticDiagnostics(fileName));
}
});
});
}
function emitSoon(fileName) {
return new Promise(resolve => {
process.nextTick(function () {
if (/\.d\.ts$/.test(fileName)) {
// if it's already a d.ts file just emit its signature
const snapshot = host.getScriptSnapshot(fileName);
const signature = crypto.createHash('md5')
.update(snapshot.getText(0, snapshot.getLength()))
.digest('base64');
return resolve({
fileName,
signature,
files: []
});
}
const output = service.getEmitOutput(fileName);
const files = [];
let signature;
for (const file of output.outputFiles) {
if (!emitSourceMapsInStream && /\.js\.map$/.test(file.name)) {
continue;
}
if (/\.d\.ts$/.test(file.name)) {
signature = crypto.createHash('md5')
.update(file.text)
.digest('base64');
if (!userWantsDeclarations) {
// don't leak .d.ts files if users don't want them
continue;
}
}
const vinyl = new Vinyl({
path: file.name,
contents: Buffer.from(file.text),
base: !config._emitWithoutBasePath && baseFor(host.getScriptSnapshot(fileName)) || undefined
});
if (!emitSourceMapsInStream && /\.js$/.test(file.name)) {
const sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
if (sourcemapFile) {
const extname = path.extname(vinyl.relative);
const basename = path.basename(vinyl.relative, extname);
const dirname = path.dirname(vinyl.relative);
const tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
const sourceMap = JSON.parse(sourcemapFile.text);
sourceMap.sources[0] = tsname.replace(/\\/g, '/');
vinyl.sourceMap = sourceMap;
}
}
files.push(vinyl);
}
resolve({
fileName,
signature,
files
});
});
});
}
const newErrors = Object.create(null);
const t1 = Date.now();
const toBeEmitted = [];
const toBeCheckedSyntactically = [];
const toBeCheckedSemantically = [];
const filesWithChangedSignature = [];
const dependentFiles = [];
const newLastBuildVersion = new Map();
for (const fileName of host.getScriptFileNames()) {
if (lastBuildVersion[fileName] !== host.getScriptVersion(fileName)) {
toBeEmitted.push(fileName);
toBeCheckedSyntactically.push(fileName);
toBeCheckedSemantically.push(fileName);
}
}
return new Promise(resolve => {
const semanticCheckInfo = new Map();
const seenAsDependentFile = new Set();
function workOnNext() {
let promise;
// let fileName: string;
// someone told us to stop this
if (token.isCancellationRequested()) {
_log('[CANCEL]', '>>This compile run was cancelled<<');
newLastBuildVersion.clear();
resolve();
return;
}
// (1st) emit code
else if (toBeEmitted.length) {
const fileName = toBeEmitted.pop();
promise = emitSoon(fileName).then(value => {
for (const file of value.files) {
_log('[emit code]', file.path);
out(file);
}
// remember when this was built
newLastBuildVersion.set(fileName, host.getScriptVersion(fileName));
// remember the signature
if (value.signature && lastDtsHash[fileName] !== value.signature) {
lastDtsHash[fileName] = value.signature;
filesWithChangedSignature.push(fileName);
}
}).catch(e => {
// can't just skip this or make a result up..
host.error(`ERROR emitting ${fileName}`);
host.error(e);
});
}
// (2nd) check syntax
else if (toBeCheckedSyntactically.length) {
const fileName = toBeCheckedSyntactically.pop();
_log('[check syntax]', fileName);
promise = checkSyntaxSoon(fileName).then(diagnostics => {
delete oldErrors[fileName];
if (diagnostics.length > 0) {
diagnostics.forEach(d => onError(d));
newErrors[fileName] = diagnostics;
// stop the world when there are syntax errors
toBeCheckedSyntactically.length = 0;
toBeCheckedSemantically.length = 0;
filesWithChangedSignature.length = 0;
}
});
}
// (3rd) check semantics
else if (toBeCheckedSemantically.length) {
let fileName = toBeCheckedSemantically.pop();
while (fileName && semanticCheckInfo.has(fileName)) {
fileName = toBeCheckedSemantically.pop();
}
if (fileName) {
_log('[check semantics]', fileName);
promise = checkSemanticsSoon(fileName).then(diagnostics => {
delete oldErrors[fileName];
semanticCheckInfo.set(fileName, diagnostics.length);
if (diagnostics.length > 0) {
diagnostics.forEach(d => onError(d));
newErrors[fileName] = diagnostics;
}
});
}
}
// (4th) check dependents
else if (filesWithChangedSignature.length) {
while (filesWithChangedSignature.length) {
const fileName = filesWithChangedSignature.pop();
if (!isExternalModule(service.getProgram().getSourceFile(fileName))) {
_log('[check semantics*]', fileName + ' is an internal module and it has changed shape -> check whatever hasn\'t been checked yet');
toBeCheckedSemantically.push(...host.getScriptFileNames());
filesWithChangedSignature.length = 0;
dependentFiles.length = 0;
break;
}
host.collectDependents(fileName, dependentFiles);
}
}
// (5th) dependents contd
else if (dependentFiles.length) {
let fileName = dependentFiles.pop();
while (fileName && seenAsDependentFile.has(fileName)) {
fileName = dependentFiles.pop();
}
if (fileName) {
seenAsDependentFile.add(fileName);
const value = semanticCheckInfo.get(fileName);
if (value === 0) {
// already validated successfully -> look at dependents next
host.collectDependents(fileName, dependentFiles);
}
else if (typeof value === 'undefined') {
// first validate -> look at dependents next
dependentFiles.push(fileName);
toBeCheckedSemantically.push(fileName);
}
}
}
// (last) done
else {
resolve();
return;
}
if (!promise) {
promise = Promise.resolve();
}
promise.then(function () {
// give the event loop a chance between work items
process.nextTick(workOnNext);
}).catch(err => {
console.error(err);
});
}
workOnNext();
}).then(() => {
// store the build versions so unchanged files aren't rebuilt next time
newLastBuildVersion.forEach((value, key) => {
lastBuildVersion[key] = value;
});
// print old errors and keep them
utils.collections.forEach(oldErrors, entry => {
entry.value.forEach(diag => onError(diag));
newErrors[entry.key] = entry.value;
});
oldErrors = newErrors;
// print stats
const headNow = process.memoryUsage().heapUsed;
const MB = 1024 * 1024;
_log('[tsb]', `time: ${colors.yellow((Date.now() - t1) + 'ms')} + \nmem: ${colors.cyan(Math.ceil(headNow / MB) + 'MB')} ${colors.bgCyan('delta: ' + Math.ceil((headNow - headUsed) / MB))}`);
headUsed = headNow;
});
}
return {
file,
build,
languageService: service
};
}
exports.createTypeScriptBuilder = createTypeScriptBuilder;
class ScriptSnapshot {
constructor(text, mtime) {
this._text = text;
this._mtime = mtime;
}
getVersion() {
return this._mtime.toUTCString();
}
getText(start, end) {
return this._text.substring(start, end);
}
getLength() {
return this._text.length;
}
getChangeRange(_oldSnapshot) {
return undefined;
}
}
class VinylScriptSnapshot extends ScriptSnapshot {
constructor(file) {
super(file.contents.toString(), file.stat.mtime);
this._base = file.base;
}
getBase() {
return this._base;
}
}
class LanguageServiceHost {
constructor(_cmdLine, _projectPath, _log) {
this._cmdLine = _cmdLine;
this._projectPath = _projectPath;
this._log = _log;
this.directoryExists = ts.sys.directoryExists;
this.getDirectories = ts.sys.getDirectories;
this.fileExists = ts.sys.fileExists;
this.readFile = ts.sys.readFile;
this.readDirectory = ts.sys.readDirectory;
this._snapshots = Object.create(null);
this._filesInProject = new Set(_cmdLine.fileNames);
this._filesAdded = new Set();
this._dependencies = new utils.graph.Graph(s => s);
this._dependenciesRecomputeList = [];
this._fileNameToDeclaredModule = Object.create(null);
this._projectVersion = 1;
}
log(_s) {
// console.log(s);
}
trace(_s) {
// console.log(s);
}
error(s) {
console.error(s);
}
getCompilationSettings() {
return this._cmdLine.options;
}
getProjectVersion() {
return String(this._projectVersion);
}
getScriptFileNames() {
const res = Object.keys(this._snapshots).filter(path => this._filesInProject.has(path) || this._filesAdded.has(path));
return res;
}
getScriptVersion(filename) {
filename = normalize(filename);
const result = this._snapshots[filename];
if (result) {
return result.getVersion();
}
return 'UNKNOWN_FILE_' + Math.random().toString(16).slice(2);
}
getScriptSnapshot(filename, resolve = true) {
filename = normalize(filename);
let result = this._snapshots[filename];
if (!result && resolve) {
try {
result = new VinylScriptSnapshot(new Vinyl({
path: filename,
contents: (0, fs_1.readFileSync)(filename),
base: this.getCompilationSettings().outDir,
stat: (0, fs_1.statSync)(filename)
}));
this.addScriptSnapshot(filename, result);
}
catch (e) {
// ignore
}
}
return result;
}
addScriptSnapshot(filename, snapshot) {
this._projectVersion++;
filename = normalize(filename);
const old = this._snapshots[filename];
if (!old && !this._filesInProject.has(filename) && !filename.endsWith('.d.ts')) {
// ^^^^^^^^^^^^^^^^^^^^^^^^^^
// not very proper!
this._filesAdded.add(filename);
}
if (!old || old.getVersion() !== snapshot.getVersion()) {
this._dependenciesRecomputeList.push(filename);
const node = this._dependencies.lookup(filename);
if (node) {
node.outgoing = Object.create(null);
}
// (cheap) check for declare module
LanguageServiceHost._declareModule.lastIndex = 0;
let match;
while ((match = LanguageServiceHost._declareModule.exec(snapshot.getText(0, snapshot.getLength())))) {
let declaredModules = this._fileNameToDeclaredModule[filename];
if (!declaredModules) {
this._fileNameToDeclaredModule[filename] = declaredModules = [];
}
declaredModules.push(match[2]);
}
}
this._snapshots[filename] = snapshot;
return old;
}
removeScriptSnapshot(filename) {
this._filesInProject.delete(filename);
this._filesAdded.delete(filename);
this._projectVersion++;
filename = normalize(filename);
delete this._fileNameToDeclaredModule[filename];
return delete this._snapshots[filename];
}
getCurrentDirectory() {
return path.dirname(this._projectPath);
}
getDefaultLibFileName(options) {
return ts.getDefaultLibFilePath(options);
}
// ---- dependency management
collectDependents(filename, target) {
while (this._dependenciesRecomputeList.length) {
this._processFile(this._dependenciesRecomputeList.pop());
}
filename = normalize(filename);
const node = this._dependencies.lookup(filename);
if (node) {
utils.collections.forEach(node.incoming, entry => target.push(entry.key));
}
}
_processFile(filename) {
if (filename.match(/.*\.d\.ts$/)) {
return;
}
filename = normalize(filename);
const snapshot = this.getScriptSnapshot(filename);
if (!snapshot) {
this._log('processFile', `Missing snapshot for: ${filename}`);
return;
}
const info = ts.preProcessFile(snapshot.getText(0, snapshot.getLength()), true);
// (1) ///-references
info.referencedFiles.forEach(ref => {
const resolvedPath = path.resolve(path.dirname(filename), ref.fileName);
const normalizedPath = normalize(resolvedPath);
this._dependencies.inertEdge(filename, normalizedPath);
});
// (2) import-require statements
info.importedFiles.forEach(ref => {
const stopDirname = normalize(this.getCurrentDirectory());
let dirname = filename;
let found = false;
while (!found && dirname.indexOf(stopDirname) === 0) {
dirname = path.dirname(dirname);
const resolvedPath = path.resolve(dirname, ref.fileName);
const normalizedPath = normalize(resolvedPath);
if (this.getScriptSnapshot(normalizedPath + '.ts')) {
this._dependencies.inertEdge(filename, normalizedPath + '.ts');
found = true;
}
else if (this.getScriptSnapshot(normalizedPath + '.d.ts')) {
this._dependencies.inertEdge(filename, normalizedPath + '.d.ts');
found = true;
}
}
if (!found) {
for (const key in this._fileNameToDeclaredModule) {
if (this._fileNameToDeclaredModule[key] && ~this._fileNameToDeclaredModule[key].indexOf(ref.fileName)) {
this._dependencies.inertEdge(filename, key);
}
}
}
});
}
}
LanguageServiceHost._declareModule = /declare\s+module\s+('|")(.+)\1/g;
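
The build loop above polls token.isCancellationRequested() before each unit of work (emit, syntax check, semantic check), so a long compile can be abandoned cleanly. A minimal sketch of a token you could pass to build(); the Cancellable class and the commented-out builder/out/onError values are illustrative, not part of this module.

import { CancellationToken } from './builder';

class Cancellable implements CancellationToken {
	private _cancelled = false;
	cancel(): void { this._cancelled = true; }
	isCancellationRequested(): boolean { return this._cancelled; }
}

const token = new Cancellable();
// builder.build(out, onError, token);  // start a build ...
// token.cancel();                      // ... and abandon it when new input arrives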

608
build/lib/tsb/builder.ts Normal file
View File

@@ -0,0 +1,608 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { statSync, readFileSync } from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
import * as utils from './utils';
import * as colors from 'ansi-colors';
import * as ts from 'typescript';
import * as Vinyl from 'vinyl';
export interface IConfiguration {
logFn: (topic: string, message: string) => void;
_emitWithoutBasePath?: boolean;
}
export interface CancellationToken {
isCancellationRequested(): boolean;
}
export namespace CancellationToken {
export const None: CancellationToken = {
isCancellationRequested() { return false; }
};
}
export interface ITypeScriptBuilder {
build(out: (file: Vinyl) => void, onError: (err: ts.Diagnostic) => void, token?: CancellationToken): Promise<any>;
file(file: Vinyl): void;
languageService: ts.LanguageService;
}
function normalize(path: string): string {
return path.replace(/\\/g, '/');
}
export function createTypeScriptBuilder(config: IConfiguration, projectFile: string, cmd: ts.ParsedCommandLine): ITypeScriptBuilder {
const _log = config.logFn;
const host = new LanguageServiceHost(cmd, projectFile, _log);
const service = ts.createLanguageService(host, ts.createDocumentRegistry());
const lastBuildVersion: { [path: string]: string } = Object.create(null);
const lastDtsHash: { [path: string]: string } = Object.create(null);
const userWantsDeclarations = cmd.options.declaration;
let oldErrors: { [path: string]: ts.Diagnostic[] } = Object.create(null);
let headUsed = process.memoryUsage().heapUsed;
let emitSourceMapsInStream = true;
// always emit declaration files
host.getCompilationSettings().declaration = true;
function file(file: Vinyl): void {
// support gulp-sourcemaps
if ((<any>file).sourceMap) {
emitSourceMapsInStream = false;
}
if (!file.contents) {
host.removeScriptSnapshot(file.path);
} else {
host.addScriptSnapshot(file.path, new VinylScriptSnapshot(file));
}
}
function baseFor(snapshot: ScriptSnapshot): string {
if (snapshot instanceof VinylScriptSnapshot) {
return cmd.options.outDir || snapshot.getBase();
} else {
return '';
}
}
function isExternalModule(sourceFile: ts.SourceFile): boolean {
return (<any>sourceFile).externalModuleIndicator
|| /declare\s+module\s+('|")(.+)\1/.test(sourceFile.getText());
}
function build(out: (file: Vinyl) => void, onError: (err: any) => void, token = CancellationToken.None): Promise<any> {
function checkSyntaxSoon(fileName: string): Promise<ts.Diagnostic[]> {
return new Promise<ts.Diagnostic[]>(resolve => {
process.nextTick(function () {
if (!host.getScriptSnapshot(fileName, false)) {
resolve([]); // no script, no problems
} else {
resolve(service.getSyntacticDiagnostics(fileName));
}
});
});
}
function checkSemanticsSoon(fileName: string): Promise<ts.Diagnostic[]> {
return new Promise<ts.Diagnostic[]>(resolve => {
process.nextTick(function () {
if (!host.getScriptSnapshot(fileName, false)) {
resolve([]); // no script, no problems
} else {
resolve(service.getSemanticDiagnostics(fileName));
}
});
});
}
function emitSoon(fileName: string): Promise<{ fileName: string; signature?: string; files: Vinyl[] }> {
return new Promise(resolve => {
process.nextTick(function () {
if (/\.d\.ts$/.test(fileName)) {
// if it's already a d.ts file just emit its signature
const snapshot = host.getScriptSnapshot(fileName);
const signature = crypto.createHash('md5')
.update(snapshot.getText(0, snapshot.getLength()))
.digest('base64');
return resolve({
fileName,
signature,
files: []
});
}
const output = service.getEmitOutput(fileName);
const files: Vinyl[] = [];
let signature: string | undefined;
for (const file of output.outputFiles) {
if (!emitSourceMapsInStream && /\.js\.map$/.test(file.name)) {
continue;
}
if (/\.d\.ts$/.test(file.name)) {
signature = crypto.createHash('md5')
.update(file.text)
.digest('base64');
if (!userWantsDeclarations) {
// don't leak .d.ts files if users don't want them
continue;
}
}
const vinyl = new Vinyl({
path: file.name,
contents: Buffer.from(file.text),
base: !config._emitWithoutBasePath && baseFor(host.getScriptSnapshot(fileName)) || undefined
});
if (!emitSourceMapsInStream && /\.js$/.test(file.name)) {
const sourcemapFile = output.outputFiles.filter(f => /\.js\.map$/.test(f.name))[0];
if (sourcemapFile) {
const extname = path.extname(vinyl.relative);
const basename = path.basename(vinyl.relative, extname);
const dirname = path.dirname(vinyl.relative);
const tsname = (dirname === '.' ? '' : dirname + '/') + basename + '.ts';
const sourceMap = JSON.parse(sourcemapFile.text);
sourceMap.sources[0] = tsname.replace(/\\/g, '/');
(<any>vinyl).sourceMap = sourceMap;
}
}
files.push(vinyl);
}
resolve({
fileName,
signature,
files
});
});
});
}
const newErrors: { [path: string]: ts.Diagnostic[] } = Object.create(null);
const t1 = Date.now();
const toBeEmitted: string[] = [];
const toBeCheckedSyntactically: string[] = [];
const toBeCheckedSemantically: string[] = [];
const filesWithChangedSignature: string[] = [];
const dependentFiles: string[] = [];
const newLastBuildVersion = new Map<string, string>();
for (const fileName of host.getScriptFileNames()) {
if (lastBuildVersion[fileName] !== host.getScriptVersion(fileName)) {
toBeEmitted.push(fileName);
toBeCheckedSyntactically.push(fileName);
toBeCheckedSemantically.push(fileName);
}
}
return new Promise<void>(resolve => {
const semanticCheckInfo = new Map<string, number>();
const seenAsDependentFile = new Set<string>();
function workOnNext() {
let promise: Promise<any> | undefined;
// let fileName: string;
// someone told us to stop this
if (token.isCancellationRequested()) {
_log('[CANCEL]', '>>This compile run was cancelled<<');
newLastBuildVersion.clear();
resolve();
return;
}
// (1st) emit code
else if (toBeEmitted.length) {
const fileName = toBeEmitted.pop()!;
promise = emitSoon(fileName).then(value => {
for (const file of value.files) {
_log('[emit code]', file.path);
out(file);
}
// remember when this was built
newLastBuildVersion.set(fileName, host.getScriptVersion(fileName));
// remember the signature
if (value.signature && lastDtsHash[fileName] !== value.signature) {
lastDtsHash[fileName] = value.signature;
filesWithChangedSignature.push(fileName);
}
}).catch(e => {
// can't just skip this or make a result up..
host.error(`ERROR emitting ${fileName}`);
host.error(e);
});
}
// (2nd) check syntax
else if (toBeCheckedSyntactically.length) {
const fileName = toBeCheckedSyntactically.pop()!;
_log('[check syntax]', fileName);
promise = checkSyntaxSoon(fileName).then(diagnostics => {
delete oldErrors[fileName];
if (diagnostics.length > 0) {
diagnostics.forEach(d => onError(d));
newErrors[fileName] = diagnostics;
// stop the world when there are syntax errors
toBeCheckedSyntactically.length = 0;
toBeCheckedSemantically.length = 0;
filesWithChangedSignature.length = 0;
}
});
}
// (3rd) check semantics
else if (toBeCheckedSemantically.length) {
let fileName = toBeCheckedSemantically.pop();
while (fileName && semanticCheckInfo.has(fileName)) {
fileName = toBeCheckedSemantically.pop()!;
}
if (fileName) {
_log('[check semantics]', fileName);
promise = checkSemanticsSoon(fileName).then(diagnostics => {
delete oldErrors[fileName!];
semanticCheckInfo.set(fileName!, diagnostics.length);
if (diagnostics.length > 0) {
diagnostics.forEach(d => onError(d));
newErrors[fileName!] = diagnostics;
}
});
}
}
// (4th) check dependents
else if (filesWithChangedSignature.length) {
while (filesWithChangedSignature.length) {
const fileName = filesWithChangedSignature.pop()!;
if (!isExternalModule(service.getProgram()!.getSourceFile(fileName)!)) {
_log('[check semantics*]', fileName + ' is an internal module and it has changed shape -> check whatever hasn\'t been checked yet');
toBeCheckedSemantically.push(...host.getScriptFileNames());
filesWithChangedSignature.length = 0;
dependentFiles.length = 0;
break;
}
host.collectDependents(fileName, dependentFiles);
}
}
// (5th) dependents contd
else if (dependentFiles.length) {
let fileName = dependentFiles.pop();
while (fileName && seenAsDependentFile.has(fileName)) {
fileName = dependentFiles.pop();
}
if (fileName) {
seenAsDependentFile.add(fileName);
const value = semanticCheckInfo.get(fileName);
if (value === 0) {
// already validated successfully -> look at dependents next
host.collectDependents(fileName, dependentFiles);
} else if (typeof value === 'undefined') {
// first validate -> look at dependents next
dependentFiles.push(fileName);
toBeCheckedSemantically.push(fileName);
}
}
}
// (last) done
else {
resolve();
return;
}
if (!promise) {
promise = Promise.resolve();
}
promise.then(function () {
// give the event loop a chance between work items
process.nextTick(workOnNext);
}).catch(err => {
console.error(err);
});
}
workOnNext();
}).then(() => {
// store the build versions so unchanged files aren't rebuilt next time
newLastBuildVersion.forEach((value, key) => {
lastBuildVersion[key] = value;
});
// print old errors and keep them
utils.collections.forEach(oldErrors, entry => {
entry.value.forEach(diag => onError(diag));
newErrors[entry.key] = entry.value;
});
oldErrors = newErrors;
// print stats
const headNow = process.memoryUsage().heapUsed;
const MB = 1024 * 1024;
_log(
'[tsb]',
`time: ${colors.yellow((Date.now() - t1) + 'ms')} + \nmem: ${colors.cyan(Math.ceil(headNow / MB) + 'MB')} ${colors.bgCyan('delta: ' + Math.ceil((headNow - headUsed) / MB))}`
);
headUsed = headNow;
});
}
return {
file,
build,
languageService: service
};
}
class ScriptSnapshot implements ts.IScriptSnapshot {
private readonly _text: string;
private readonly _mtime: Date;
constructor(text: string, mtime: Date) {
this._text = text;
this._mtime = mtime;
}
getVersion(): string {
return this._mtime.toUTCString();
}
getText(start: number, end: number): string {
return this._text.substring(start, end);
}
getLength(): number {
return this._text.length;
}
getChangeRange(_oldSnapshot: ts.IScriptSnapshot): ts.TextChangeRange | undefined {
return undefined;
}
}
class VinylScriptSnapshot extends ScriptSnapshot {
private readonly _base: string;
constructor(file: Vinyl) {
super(file.contents!.toString(), file.stat!.mtime);
this._base = file.base;
}
getBase(): string {
return this._base;
}
}
class LanguageServiceHost implements ts.LanguageServiceHost {
private readonly _snapshots: { [path: string]: ScriptSnapshot };
private readonly _filesInProject: Set<string>;
private readonly _filesAdded: Set<string>;
private readonly _dependencies: utils.graph.Graph<string>;
private readonly _dependenciesRecomputeList: string[];
private readonly _fileNameToDeclaredModule: { [path: string]: string[] };
private _projectVersion: number;
constructor(
private readonly _cmdLine: ts.ParsedCommandLine,
private readonly _projectPath: string,
private readonly _log: (topic: string, message: string) => void
) {
this._snapshots = Object.create(null);
this._filesInProject = new Set(_cmdLine.fileNames);
this._filesAdded = new Set();
this._dependencies = new utils.graph.Graph<string>(s => s);
this._dependenciesRecomputeList = [];
this._fileNameToDeclaredModule = Object.create(null);
this._projectVersion = 1;
}
log(_s: string): void {
// console.log(s);
}
trace(_s: string): void {
// console.log(s);
}
error(s: string): void {
console.error(s);
}
getCompilationSettings(): ts.CompilerOptions {
return this._cmdLine.options;
}
getProjectVersion(): string {
return String(this._projectVersion);
}
getScriptFileNames(): string[] {
const res = Object.keys(this._snapshots).filter(path => this._filesInProject.has(path) || this._filesAdded.has(path));
return res;
}
getScriptVersion(filename: string): string {
filename = normalize(filename);
const result = this._snapshots[filename];
if (result) {
return result.getVersion();
}
return 'UNKNOWN_FILE_' + Math.random().toString(16).slice(2);
}
getScriptSnapshot(filename: string, resolve: boolean = true): ScriptSnapshot {
filename = normalize(filename);
let result = this._snapshots[filename];
if (!result && resolve) {
try {
result = new VinylScriptSnapshot(new Vinyl(<any>{
path: filename,
contents: readFileSync(filename),
base: this.getCompilationSettings().outDir,
stat: statSync(filename)
}));
this.addScriptSnapshot(filename, result);
} catch (e) {
// ignore
}
}
return result;
}
private static _declareModule = /declare\s+module\s+('|")(.+)\1/g;
addScriptSnapshot(filename: string, snapshot: ScriptSnapshot): ScriptSnapshot {
this._projectVersion++;
filename = normalize(filename);
const old = this._snapshots[filename];
if (!old && !this._filesInProject.has(filename) && !filename.endsWith('.d.ts')) {
// ^^^^^^^^^^^^^^^^^^^^^^^^^^
// not very proper!
this._filesAdded.add(filename);
}
if (!old || old.getVersion() !== snapshot.getVersion()) {
this._dependenciesRecomputeList.push(filename);
const node = this._dependencies.lookup(filename);
if (node) {
node.outgoing = Object.create(null);
}
// (cheap) check for declare module
LanguageServiceHost._declareModule.lastIndex = 0;
let match: RegExpExecArray | null | undefined;
while ((match = LanguageServiceHost._declareModule.exec(snapshot.getText(0, snapshot.getLength())))) {
let declaredModules = this._fileNameToDeclaredModule[filename];
if (!declaredModules) {
this._fileNameToDeclaredModule[filename] = declaredModules = [];
}
declaredModules.push(match[2]);
}
}
this._snapshots[filename] = snapshot;
return old;
}
removeScriptSnapshot(filename: string): boolean {
this._filesInProject.delete(filename);
this._filesAdded.delete(filename);
this._projectVersion++;
filename = normalize(filename);
delete this._fileNameToDeclaredModule[filename];
return delete this._snapshots[filename];
}
getCurrentDirectory(): string {
return path.dirname(this._projectPath);
}
getDefaultLibFileName(options: ts.CompilerOptions): string {
return ts.getDefaultLibFilePath(options);
}
readonly directoryExists = ts.sys.directoryExists;
readonly getDirectories = ts.sys.getDirectories;
readonly fileExists = ts.sys.fileExists;
readonly readFile = ts.sys.readFile;
readonly readDirectory = ts.sys.readDirectory;
// ---- dependency management
collectDependents(filename: string, target: string[]): void {
while (this._dependenciesRecomputeList.length) {
this._processFile(this._dependenciesRecomputeList.pop()!);
}
filename = normalize(filename);
const node = this._dependencies.lookup(filename);
if (node) {
utils.collections.forEach(node.incoming, entry => target.push(entry.key));
}
}
_processFile(filename: string): void {
if (filename.match(/.*\.d\.ts$/)) {
return;
}
filename = normalize(filename);
const snapshot = this.getScriptSnapshot(filename);
if (!snapshot) {
this._log('processFile', `Missing snapshot for: ${filename}`);
return;
}
const info = ts.preProcessFile(snapshot.getText(0, snapshot.getLength()), true);
// (1) ///-references
info.referencedFiles.forEach(ref => {
const resolvedPath = path.resolve(path.dirname(filename), ref.fileName);
const normalizedPath = normalize(resolvedPath);
this._dependencies.inertEdge(filename, normalizedPath);
});
// (2) import-require statements
info.importedFiles.forEach(ref => {
const stopDirname = normalize(this.getCurrentDirectory());
let dirname = filename;
let found = false;
while (!found && dirname.indexOf(stopDirname) === 0) {
dirname = path.dirname(dirname);
const resolvedPath = path.resolve(dirname, ref.fileName);
const normalizedPath = normalize(resolvedPath);
if (this.getScriptSnapshot(normalizedPath + '.ts')) {
this._dependencies.inertEdge(filename, normalizedPath + '.ts');
found = true;
} else if (this.getScriptSnapshot(normalizedPath + '.d.ts')) {
this._dependencies.inertEdge(filename, normalizedPath + '.d.ts');
found = true;
}
}
if (!found) {
for (const key in this._fileNameToDeclaredModule) {
if (this._fileNameToDeclaredModule[key] && ~this._fileNameToDeclaredModule[key].indexOf(ref.fileName)) {
this._dependencies.inertEdge(filename, key);
}
}
}
});
}
}
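
For reference, a minimal sketch of driving this builder API directly, without the gulp stream wrapper defined in index.ts further below. The tsconfig path is an assumption; pushing every file through builder.file() mirrors what the compile stream does before calling build().

import * as ts from 'typescript';
import * as Vinyl from 'vinyl';
import { readFileSync, statSync } from 'fs';
import { dirname } from 'path';
import { createTypeScriptBuilder, CancellationToken } from './builder';

const projectPath = 'src/tsconfig.json'; // assumed path
const parsed = ts.readConfigFile(projectPath, ts.sys.readFile);
const cmdLine = ts.parseJsonConfigFileContent(parsed.config, ts.sys, dirname(projectPath));

const builder = createTypeScriptBuilder({ logFn: (topic, message) => console.log(topic, message) }, projectPath, cmdLine);

// Hand every source file to the language service host, as the stream in index.ts does.
for (const fileName of cmdLine.fileNames) {
	builder.file(new Vinyl({
		path: fileName,
		base: dirname(projectPath),
		contents: readFileSync(fileName),
		stat: statSync(fileName)
	}));
}

// Emitted files arrive through the first callback, diagnostics through the second.
builder.build(
	file => console.log('[emit]', file.path),
	diag => console.error(ts.flattenDiagnosticMessageText(diag.messageText, '\n')),
	CancellationToken.None
).then(() => console.log('build finished'));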

130
build/lib/tsb/index.js Normal file
View File

@@ -0,0 +1,130 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.create = void 0;
const Vinyl = require("vinyl");
const through = require("through");
const builder = require("./builder");
const ts = require("typescript");
const stream_1 = require("stream");
const path_1 = require("path");
const utils_1 = require("./utils");
const fs_1 = require("fs");
const log = require("fancy-log");
const colors = require("ansi-colors");
const transpiler_1 = require("./transpiler");
class EmptyDuplex extends stream_1.Duplex {
_write(_chunk, _encoding, callback) { callback(); }
_read() { this.push(null); }
}
function createNullCompiler() {
const result = function () { return new EmptyDuplex(); };
result.src = () => new EmptyDuplex();
return result;
}
const _defaultOnError = (err) => console.log(JSON.stringify(err, null, 4));
function create(projectPath, existingOptions, config, onError = _defaultOnError) {
function printDiagnostic(diag) {
if (!diag.file || !diag.start) {
onError(ts.flattenDiagnosticMessageText(diag.messageText, '\n'));
}
else {
const lineAndCh = diag.file.getLineAndCharacterOfPosition(diag.start);
onError(utils_1.strings.format('{0}({1},{2}): {3}', diag.file.fileName, lineAndCh.line + 1, lineAndCh.character + 1, ts.flattenDiagnosticMessageText(diag.messageText, '\n')));
}
}
const parsed = ts.readConfigFile(projectPath, ts.sys.readFile);
if (parsed.error) {
printDiagnostic(parsed.error);
return createNullCompiler();
}
const cmdLine = ts.parseJsonConfigFileContent(parsed.config, ts.sys, (0, path_1.dirname)(projectPath), existingOptions);
if (cmdLine.errors.length > 0) {
cmdLine.errors.forEach(printDiagnostic);
return createNullCompiler();
}
function logFn(topic, message) {
if (config.verbose) {
log(colors.cyan(topic), message);
}
}
// FULL COMPILE stream doing transpile, syntax and semantic diagnostics
function createCompileStream(builder, token) {
return through(function (file) {
// give the file to the compiler
if (file.isStream()) {
this.emit('error', 'no support for streams');
return;
}
builder.file(file);
}, function () {
// start the compilation process
builder.build(file => this.queue(file), printDiagnostic, token).catch(e => console.error(e)).then(() => this.queue(null));
});
}
// TRANSPILE ONLY stream doing just TS to JS conversion
function createTranspileStream(transpiler) {
return through(function (file) {
// give the file to the compiler
if (file.isStream()) {
this.emit('error', 'no support for streams');
return;
}
if (!file.contents) {
return;
}
if (!config.transpileOnlyIncludesDts && file.path.endsWith('.d.ts')) {
return;
}
if (!transpiler.onOutfile) {
transpiler.onOutfile = file => this.queue(file);
}
transpiler.transpile(file);
}, function () {
transpiler.join().then(() => {
this.queue(null);
transpiler.onOutfile = undefined;
});
});
}
let result;
if (config.transpileOnly) {
const transpiler = new transpiler_1.Transpiler(logFn, printDiagnostic, projectPath, cmdLine);
result = (() => createTranspileStream(transpiler));
}
else {
const _builder = builder.createTypeScriptBuilder({ logFn }, projectPath, cmdLine);
result = ((token) => createCompileStream(_builder, token));
}
result.src = (opts) => {
let _pos = 0;
const _fileNames = cmdLine.fileNames.slice(0);
return new class extends stream_1.Readable {
constructor() {
super({ objectMode: true });
}
_read() {
let more = true;
let path;
for (; more && _pos < _fileNames.length; _pos++) {
path = _fileNames[_pos];
more = this.push(new Vinyl({
path,
contents: (0, fs_1.readFileSync)(path),
stat: (0, fs_1.statSync)(path),
cwd: opts && opts.cwd,
base: opts && opts.base || (0, path_1.dirname)(projectPath)
}));
}
if (_pos >= _fileNames.length) {
this.push(null);
}
}
};
};
return result;
}
exports.create = create;
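
A minimal sketch of how the exported create() is typically wired into a gulp task. The tsconfig path, the verbose flag, the out/ destination and the import path are assumptions, not taken from this repository's gulpfile.

import * as gulp from 'gulp';
import * as tsb from './tsb';

const compilation = tsb.create('src/tsconfig.json', {}, { verbose: true });

gulp.task('compile', () =>
	compilation.src()             // read the files listed in the tsconfig
		.pipe(compilation())      // full compile: emit plus syntax/semantic diagnostics
		.pipe(gulp.dest('out'))   // write the emitted JavaScript
);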

164
build/lib/tsb/index.ts Normal file
View File

@@ -0,0 +1,164 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as Vinyl from 'vinyl';
import * as through from 'through';
import * as builder from './builder';
import * as ts from 'typescript';
import { Readable, Writable, Duplex } from 'stream';
import { dirname } from 'path';
import { strings } from './utils';
import { readFileSync, statSync } from 'fs';
import * as log from 'fancy-log';
import colors = require('ansi-colors');
import { Transpiler } from './transpiler';
export interface IncrementalCompiler {
(token?: any): Readable & Writable;
src(opts?: { cwd?: string; base?: string }): Readable;
}
class EmptyDuplex extends Duplex {
_write(_chunk: any, _encoding: string, callback: (err?: Error) => void): void { callback(); }
_read() { this.push(null); }
}
function createNullCompiler(): IncrementalCompiler {
const result: IncrementalCompiler = function () { return new EmptyDuplex(); };
result.src = () => new EmptyDuplex();
return result;
}
const _defaultOnError = (err: string) => console.log(JSON.stringify(err, null, 4));
export function create(
projectPath: string,
existingOptions: Partial<ts.CompilerOptions>,
config: { verbose?: boolean; transpileOnly?: boolean; transpileOnlyIncludesDts?: boolean },
onError: (message: string) => void = _defaultOnError
): IncrementalCompiler {
function printDiagnostic(diag: ts.Diagnostic): void {
if (!diag.file || !diag.start) {
onError(ts.flattenDiagnosticMessageText(diag.messageText, '\n'));
} else {
const lineAndCh = diag.file.getLineAndCharacterOfPosition(diag.start);
onError(strings.format('{0}({1},{2}): {3}',
diag.file.fileName,
lineAndCh.line + 1,
lineAndCh.character + 1,
ts.flattenDiagnosticMessageText(diag.messageText, '\n'))
);
}
}
const parsed = ts.readConfigFile(projectPath, ts.sys.readFile);
if (parsed.error) {
printDiagnostic(parsed.error);
return createNullCompiler();
}
const cmdLine = ts.parseJsonConfigFileContent(parsed.config, ts.sys, dirname(projectPath), existingOptions);
if (cmdLine.errors.length > 0) {
cmdLine.errors.forEach(printDiagnostic);
return createNullCompiler();
}
function logFn(topic: string, message: string): void {
if (config.verbose) {
log(colors.cyan(topic), message);
}
}
// FULL COMPILE stream doing transpile, syntax and semantic diagnostics
function createCompileStream(builder: builder.ITypeScriptBuilder, token?: builder.CancellationToken): Readable & Writable {
return through(function (this: through.ThroughStream, file: Vinyl) {
// give the file to the compiler
if (file.isStream()) {
this.emit('error', 'no support for streams');
return;
}
builder.file(file);
}, function (this: { queue(a: any): void }) {
// start the compilation process
builder.build(
file => this.queue(file),
printDiagnostic,
token
).catch(e => console.error(e)).then(() => this.queue(null));
});
}
// TRANSPILE ONLY stream doing just TS to JS conversion
function createTranspileStream(transpiler: Transpiler): Readable & Writable {
return through(function (this: through.ThroughStream & { queue(a: any): void }, file: Vinyl) {
// give the file to the compiler
if (file.isStream()) {
this.emit('error', 'no support for streams');
return;
}
if (!file.contents) {
return;
}
if (!config.transpileOnlyIncludesDts && file.path.endsWith('.d.ts')) {
return;
}
if (!transpiler.onOutfile) {
transpiler.onOutfile = file => this.queue(file);
}
transpiler.transpile(file);
}, function (this: { queue(a: any): void }) {
transpiler.join().then(() => {
this.queue(null);
transpiler.onOutfile = undefined;
});
});
}
let result: IncrementalCompiler;
if (config.transpileOnly) {
const transpiler = new Transpiler(logFn, printDiagnostic, projectPath, cmdLine);
result = <any>(() => createTranspileStream(transpiler));
} else {
const _builder = builder.createTypeScriptBuilder({ logFn }, projectPath, cmdLine);
result = <any>((token: builder.CancellationToken) => createCompileStream(_builder, token));
}
result.src = (opts?: { cwd?: string; base?: string }) => {
let _pos = 0;
const _fileNames = cmdLine.fileNames.slice(0);
return new class extends Readable {
constructor() {
super({ objectMode: true });
}
_read() {
let more: boolean = true;
let path: string;
for (; more && _pos < _fileNames.length; _pos++) {
path = _fileNames[_pos];
more = this.push(new Vinyl({
path,
contents: readFileSync(path),
stat: statSync(path),
cwd: opts && opts.cwd,
base: opts && opts.base || dirname(projectPath)
}));
}
if (_pos >= _fileNames.length) {
this.push(null);
}
}
};
};
return <IncrementalCompiler>result;
}
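
With transpileOnly set, create() swaps the language-service builder for the worker-thread Transpiler, so files are converted to JavaScript without semantic type checking. A variant of the previous sketch, again with assumed paths:

import * as gulp from 'gulp';
import * as tsb from './tsb';

const transpileOnly = tsb.create('src/tsconfig.json', {}, { transpileOnly: true });

gulp.task('transpile', () =>
	transpileOnly.src()
		.pipe(transpileOnly())    // just TS -> JS conversion, no type checking
		.pipe(gulp.dest('out'))
);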

220
build/lib/tsb/transpiler.js Normal file
View File

@@ -0,0 +1,220 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.Transpiler = void 0;
const ts = require("typescript");
const threads = require("node:worker_threads");
const Vinyl = require("vinyl");
const node_os_1 = require("node:os");
function transpile(tsSrc, options) {
const isAmd = /\n(import|export)/m.test(tsSrc);
if (!isAmd && options.compilerOptions?.module === ts.ModuleKind.AMD) {
// enforce the None module system when the source has no import/export statements
options = { ...options, ...{ compilerOptions: { ...options.compilerOptions, module: ts.ModuleKind.None } } };
}
const out = ts.transpileModule(tsSrc, options);
return {
jsSrc: out.outputText,
diag: out.diagnostics ?? []
};
}
if (!threads.isMainThread) {
// WORKER
threads.parentPort?.addListener('message', (req) => {
const res = {
jsSrcs: [],
diagnostics: []
};
for (const tsSrc of req.tsSrcs) {
const out = transpile(tsSrc, req.options);
res.jsSrcs.push(out.jsSrc);
res.diagnostics.push(out.diag);
}
threads.parentPort.postMessage(res);
});
}
class TranspileWorker {
constructor(outFileFn) {
this.id = TranspileWorker.pool++;
this._worker = new threads.Worker(__filename);
this._durations = [];
this._worker.addListener('message', (res) => {
if (!this._pending) {
console.error('RECEIVING data WITHOUT request');
return;
}
const [resolve, reject, files, options, t1] = this._pending;
const outFiles = [];
const diag = [];
for (let i = 0; i < res.jsSrcs.length; i++) {
// inputs and outputs are aligned across the arrays
const file = files[i];
const jsSrc = res.jsSrcs[i];
const fileDiag = res.diagnostics[i];
if (fileDiag.length > 0) {
diag.push(...fileDiag);
continue;
}
let SuffixTypes;
(function (SuffixTypes) {
SuffixTypes[SuffixTypes["Dts"] = 5] = "Dts";
SuffixTypes[SuffixTypes["Ts"] = 3] = "Ts";
SuffixTypes[SuffixTypes["Unknown"] = 0] = "Unknown";
})(SuffixTypes || (SuffixTypes = {}));
const suffixLen = file.path.endsWith('.d.ts') ? 5 /* SuffixTypes.Dts */
: file.path.endsWith('.ts') ? 3 /* SuffixTypes.Ts */
: 0 /* SuffixTypes.Unknown */;
// check if output of a DTS-files isn't just "empty" and iff so
// skip this file
if (suffixLen === 5 /* SuffixTypes.Dts */ && _isDefaultEmpty(jsSrc)) {
continue;
}
const outBase = options.compilerOptions?.outDir ?? file.base;
const outPath = outFileFn(file.path);
outFiles.push(new Vinyl({
path: outPath,
base: outBase,
contents: Buffer.from(jsSrc),
}));
}
this._pending = undefined;
this._durations.push(Date.now() - t1);
if (diag.length > 0) {
reject(diag);
}
else {
resolve(outFiles);
}
});
}
terminate() {
// console.log(`Worker#${this.id} ENDS after ${this._durations.length} jobs (total: ${this._durations.reduce((p, c) => p + c, 0)}, avg: ${this._durations.reduce((p, c) => p + c, 0) / this._durations.length})`);
this._worker.terminate();
}
get isBusy() {
return this._pending !== undefined;
}
next(files, options) {
if (this._pending !== undefined) {
throw new Error('BUSY');
}
return new Promise((resolve, reject) => {
this._pending = [resolve, reject, files, options, Date.now()];
const req = {
options,
tsSrcs: files.map(file => String(file.contents))
};
this._worker.postMessage(req);
});
}
}
TranspileWorker.pool = 1;
class Transpiler {
constructor(logFn, _onError, configFilePath, _cmdLine) {
this._onError = _onError;
this._cmdLine = _cmdLine;
this._workerPool = [];
this._queue = [];
this._allJobs = [];
logFn('Transpile', `will use ${Transpiler.P} transpile workers`);
this._getOutputFileName = (file) => {
try {
// windows: path-sep normalizing
file = ts.normalizePath(file);
if (!_cmdLine.options.configFilePath) {
// this is needed for the INTERNAL getOutputFileNames-call below...
_cmdLine.options.configFilePath = configFilePath;
}
const isDts = file.endsWith('.d.ts');
if (isDts) {
file = file.slice(0, -5) + '.ts';
_cmdLine.fileNames.push(file);
}
const outfile = ts.getOutputFileNames(_cmdLine, file, true)[0];
if (isDts) {
_cmdLine.fileNames.pop();
}
return outfile;
}
catch (err) {
console.error(file, _cmdLine.fileNames);
console.error(err);
throw err;
}
};
}
async join() {
// wait for all pending jobs
this._consumeQueue();
await Promise.allSettled(this._allJobs);
this._allJobs.length = 0;
// terminate all workers
this._workerPool.forEach(w => w.terminate());
this._workerPool.length = 0;
}
transpile(file) {
if (this._cmdLine.options.noEmit) {
// not doing ANYTHING here
return;
}
const newLen = this._queue.push(file);
if (newLen > Transpiler.P ** 2) {
this._consumeQueue();
}
}
_consumeQueue() {
if (this._queue.length === 0) {
// no work...
return;
}
// lazily create workers
if (this._workerPool.length === 0) {
for (let i = 0; i < Transpiler.P; i++) {
this._workerPool.push(new TranspileWorker(file => this._getOutputFileName(file)));
}
}
const freeWorker = this._workerPool.filter(w => !w.isBusy);
if (freeWorker.length === 0) {
// OK, they will pick up work themselves
return;
}
for (const worker of freeWorker) {
if (this._queue.length === 0) {
break;
}
const job = new Promise(resolve => {
const consume = () => {
const files = this._queue.splice(0, Transpiler.P);
if (files.length === 0) {
// DONE
resolve(undefined);
return;
}
// work on the NEXT file
// const [inFile, outFn] = req;
worker.next(files, { compilerOptions: this._cmdLine.options }).then(outFiles => {
if (this.onOutfile) {
outFiles.map(this.onOutfile, this);
}
consume();
}).catch(err => {
this._onError(err);
});
};
consume();
});
this._allJobs.push(job);
}
}
}
exports.Transpiler = Transpiler;
Transpiler.P = Math.floor((0, node_os_1.cpus)().length * .5);
function _isDefaultEmpty(src) {
return src
.replace('"use strict";', '')
.replace(/\/\*[\s\S]*?\*\/|([^\\:]|^)\/\/.*$/gm, '$1')
.trim().length === 0;
}
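
The _isDefaultEmpty() helper above decides whether transpiled output is worth emitting: the output of a pure .d.ts file is just the "use strict" prologue plus comments, so the worker skips it. A quick check of that behaviour; the helper is module-private, so it is re-declared here verbatim for illustration.

function isDefaultEmpty(src: string): boolean {
	return src
		.replace('"use strict";', '')
		.replace(/\/\*[\s\S]*?\*\/|([^\\:]|^)\/\/.*$/gm, '$1')
		.trim().length === 0;
}

console.log(isDefaultEmpty('"use strict";\n// type-only output\n'));   // true  -> dropped
console.log(isDefaultEmpty('"use strict";\nexports.answer = 42;\n'));  // false -> emitted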

285
build/lib/tsb/transpiler.ts Normal file
View File

@@ -0,0 +1,285 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as threads from 'node:worker_threads';
import * as Vinyl from 'vinyl';
import { cpus } from 'node:os';
interface TranspileReq {
readonly tsSrcs: string[];
readonly options: ts.TranspileOptions;
}
interface TranspileRes {
readonly jsSrcs: string[];
readonly diagnostics: ts.Diagnostic[][];
}
function transpile(tsSrc: string, options: ts.TranspileOptions): { jsSrc: string; diag: ts.Diagnostic[] } {
const isAmd = /\n(import|export)/m.test(tsSrc);
if (!isAmd && options.compilerOptions?.module === ts.ModuleKind.AMD) {
// enforce the None module system when the source has no import/export statements
options = { ...options, ...{ compilerOptions: { ...options.compilerOptions, module: ts.ModuleKind.None } } };
}
const out = ts.transpileModule(tsSrc, options);
return {
jsSrc: out.outputText,
diag: out.diagnostics ?? []
};
}
if (!threads.isMainThread) {
// WORKER
threads.parentPort?.addListener('message', (req: TranspileReq) => {
const res: TranspileRes = {
jsSrcs: [],
diagnostics: []
};
for (const tsSrc of req.tsSrcs) {
const out = transpile(tsSrc, req.options);
res.jsSrcs.push(out.jsSrc);
res.diagnostics.push(out.diag);
}
threads.parentPort!.postMessage(res);
});
}
class TranspileWorker {
private static pool = 1;
readonly id = TranspileWorker.pool++;
private _worker = new threads.Worker(__filename);
private _pending?: [resolve: Function, reject: Function, file: Vinyl[], options: ts.TranspileOptions, t1: number];
private _durations: number[] = [];
constructor(outFileFn: (fileName: string) => string) {
this._worker.addListener('message', (res: TranspileRes) => {
if (!this._pending) {
console.error('RECEIVING data WITHOUT request');
return;
}
const [resolve, reject, files, options, t1] = this._pending;
const outFiles: Vinyl[] = [];
const diag: ts.Diagnostic[] = [];
for (let i = 0; i < res.jsSrcs.length; i++) {
// inputs and outputs are aligned across the arrays
const file = files[i];
const jsSrc = res.jsSrcs[i];
const fileDiag = res.diagnostics[i];
if (fileDiag.length > 0) {
diag.push(...fileDiag);
continue;
}
const enum SuffixTypes {
Dts = 5,
Ts = 3,
Unknown = 0
}
const suffixLen = file.path.endsWith('.d.ts') ? SuffixTypes.Dts
: file.path.endsWith('.ts') ? SuffixTypes.Ts
: SuffixTypes.Unknown;
// check if output of a DTS-files isn't just "empty" and iff so
// skip this file
if (suffixLen === SuffixTypes.Dts && _isDefaultEmpty(jsSrc)) {
continue;
}
const outBase = options.compilerOptions?.outDir ?? file.base;
const outPath = outFileFn(file.path);
outFiles.push(new Vinyl({
path: outPath,
base: outBase,
contents: Buffer.from(jsSrc),
}));
}
this._pending = undefined;
this._durations.push(Date.now() - t1);
if (diag.length > 0) {
reject(diag);
} else {
resolve(outFiles);
}
});
}
terminate() {
// console.log(`Worker#${this.id} ENDS after ${this._durations.length} jobs (total: ${this._durations.reduce((p, c) => p + c, 0)}, avg: ${this._durations.reduce((p, c) => p + c, 0) / this._durations.length})`);
this._worker.terminate();
}
get isBusy() {
return this._pending !== undefined;
}
next(files: Vinyl[], options: ts.TranspileOptions) {
if (this._pending !== undefined) {
throw new Error('BUSY');
}
return new Promise<Vinyl[]>((resolve, reject) => {
this._pending = [resolve, reject, files, options, Date.now()];
const req: TranspileReq = {
options,
tsSrcs: files.map(file => String(file.contents))
};
this._worker.postMessage(req);
});
}
}
export class Transpiler {
static P = Math.floor(cpus().length * .5);
private readonly _getOutputFileName: (name: string) => string;
public onOutfile?: (file: Vinyl) => void;
private _workerPool: TranspileWorker[] = [];
private _queue: Vinyl[] = [];
private _allJobs: Promise<any>[] = [];
constructor(
logFn: (topic: string, message: string) => void,
private readonly _onError: (err: any) => void,
configFilePath: string,
private readonly _cmdLine: ts.ParsedCommandLine
) {
logFn('Transpile', `will use ${Transpiler.P} transpile workers`);
// very complicated logic to re-use TS internal functions to know the output path
// given a TS input path and its config
type InternalTsApi = typeof ts & {
normalizePath(path: string): string;
getOutputFileNames(commandLine: ts.ParsedCommandLine, inputFileName: string, ignoreCase: boolean): readonly string[];
};
this._getOutputFileName = (file) => {
try {
// windows: path-sep normalizing
file = (<InternalTsApi>ts).normalizePath(file);
if (!_cmdLine.options.configFilePath) {
// this is needed for the INTERNAL getOutputFileNames-call below...
_cmdLine.options.configFilePath = configFilePath;
}
const isDts = file.endsWith('.d.ts');
if (isDts) {
file = file.slice(0, -5) + '.ts';
_cmdLine.fileNames.push(file);
}
const outfile = (<InternalTsApi>ts).getOutputFileNames(_cmdLine, file, true)[0];
if (isDts) {
_cmdLine.fileNames.pop();
}
return outfile;
} catch (err) {
console.error(file, _cmdLine.fileNames);
console.error(err);
throw err;
}
};
}
async join() {
// wait for all pending jobs
this._consumeQueue();
await Promise.allSettled(this._allJobs);
this._allJobs.length = 0;
// terminate all workers
this._workerPool.forEach(w => w.terminate());
this._workerPool.length = 0;
}
transpile(file: Vinyl) {
if (this._cmdLine.options.noEmit) {
// not doing ANYTHING here
return;
}
const newLen = this._queue.push(file);
if (newLen > Transpiler.P ** 2) {
this._consumeQueue();
}
}
private _consumeQueue(): void {
if (this._queue.length === 0) {
// no work...
return;
}
// lazily create workers
if (this._workerPool.length === 0) {
for (let i = 0; i < Transpiler.P; i++) {
this._workerPool.push(new TranspileWorker(file => this._getOutputFileName(file)));
}
}
const freeWorker = this._workerPool.filter(w => !w.isBusy);
if (freeWorker.length === 0) {
// OK, they will pick up work themselves
return;
}
for (const worker of freeWorker) {
if (this._queue.length === 0) {
break;
}
const job = new Promise(resolve => {
const consume = () => {
const files = this._queue.splice(0, Transpiler.P);
if (files.length === 0) {
// DONE
resolve(undefined);
return;
}
// work on the NEXT file
// const [inFile, outFn] = req;
worker.next(files, { compilerOptions: this._cmdLine.options }).then(outFiles => {
if (this.onOutfile) {
outFiles.map(this.onOutfile, this);
}
consume();
}).catch(err => {
this._onError(err);
});
};
consume();
});
this._allJobs.push(job);
}
}
}
function _isDefaultEmpty(src: string): boolean {
return src
.replace('"use strict";', '')
.replace(/\/\*[\s\S]*?\*\/|([^\\:]|^)\/\/.*$/gm, '$1')
.trim().length === 0;
}
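
A minimal sketch of driving the Transpiler outside the gulp stream in index.ts: files are queued with transpile(), results come back through onOutfile, and join() flushes the queue and shuts the worker threads down. The tsconfig path and the logging callbacks are assumptions.

import * as ts from 'typescript';
import * as Vinyl from 'vinyl';
import { readFileSync, statSync } from 'fs';
import { dirname } from 'path';
import { Transpiler } from './transpiler';

const projectPath = 'src/tsconfig.json'; // assumed path
const parsed = ts.readConfigFile(projectPath, ts.sys.readFile);
const cmdLine = ts.parseJsonConfigFileContent(parsed.config, ts.sys, dirname(projectPath));

const transpiler = new Transpiler((topic, message) => console.log(topic, message), err => console.error(err), projectPath, cmdLine);
transpiler.onOutfile = file => console.log('[out]', file.path);

for (const fileName of cmdLine.fileNames) {
	transpiler.transpile(new Vinyl({
		path: fileName,
		base: dirname(projectPath),
		contents: readFileSync(fileName),
		stat: statSync(fileName)
	}));
}

transpiler.join().then(() => console.log('transpile finished'));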

124
build/lib/tsb/utils.js Normal file
View File

@@ -0,0 +1,124 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.graph = exports.strings = exports.collections = void 0;
var collections;
(function (collections) {
const hasOwnProperty = Object.prototype.hasOwnProperty;
function lookup(collection, key) {
if (hasOwnProperty.call(collection, key)) {
return collection[key];
}
return null;
}
collections.lookup = lookup;
function insert(collection, key, value) {
collection[key] = value;
}
collections.insert = insert;
function lookupOrInsert(collection, key, value) {
if (hasOwnProperty.call(collection, key)) {
return collection[key];
}
else {
collection[key] = value;
return value;
}
}
collections.lookupOrInsert = lookupOrInsert;
function forEach(collection, callback) {
for (const key in collection) {
if (hasOwnProperty.call(collection, key)) {
callback({
key: key,
value: collection[key]
});
}
}
}
collections.forEach = forEach;
function contains(collection, key) {
return hasOwnProperty.call(collection, key);
}
collections.contains = contains;
})(collections = exports.collections || (exports.collections = {}));
var strings;
(function (strings) {
/**
* The empty string. The one and only.
*/
strings.empty = '';
strings.eolUnix = '\n';
function format(value, ...rest) {
return value.replace(/({\d+})/g, function (match) {
const index = Number(match.substring(1, match.length - 1));
return String(rest[index]) || match;
});
}
strings.format = format;
})(strings = exports.strings || (exports.strings = {}));
var graph;
(function (graph) {
function newNode(data) {
return {
data: data,
incoming: {},
outgoing: {}
};
}
graph.newNode = newNode;
class Graph {
constructor(_hashFn) {
this._hashFn = _hashFn;
this._nodes = {};
// empty
}
traverse(start, inwards, callback) {
const startNode = this.lookup(start);
if (!startNode) {
return;
}
this._traverse(startNode, inwards, {}, callback);
}
_traverse(node, inwards, seen, callback) {
const key = this._hashFn(node.data);
if (collections.contains(seen, key)) {
return;
}
seen[key] = true;
callback(node.data);
const nodes = inwards ? node.outgoing : node.incoming;
collections.forEach(nodes, (entry) => this._traverse(entry.value, inwards, seen, callback));
}
inertEdge(from, to) {
const fromNode = this.lookupOrInsertNode(from);
const toNode = this.lookupOrInsertNode(to);
fromNode.outgoing[this._hashFn(to)] = toNode;
toNode.incoming[this._hashFn(from)] = fromNode;
}
removeNode(data) {
const key = this._hashFn(data);
delete this._nodes[key];
collections.forEach(this._nodes, (entry) => {
delete entry.value.outgoing[key];
delete entry.value.incoming[key];
});
}
lookupOrInsertNode(data) {
const key = this._hashFn(data);
let node = collections.lookup(this._nodes, key);
if (!node) {
node = newNode(data);
this._nodes[key] = node;
}
return node;
}
lookup(data) {
return collections.lookup(this._nodes, this._hashFn(data));
}
}
graph.Graph = Graph;
})(graph = exports.graph || (exports.graph = {}));
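
strings.format() above is what index.ts uses to render diagnostics: {n} placeholders are replaced positionally. A quick illustration with made-up values:

import { strings } from './utils';

const msg = strings.format('{0}({1},{2}): {3}', 'foo.ts', 3, 7, "Cannot find name 'bar'.");
console.log(msg); // foo.ts(3,7): Cannot find name 'bar'.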

140
build/lib/tsb/utils.ts Normal file
View File

@@ -0,0 +1,140 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
export module collections {
const hasOwnProperty = Object.prototype.hasOwnProperty;
export function lookup<T>(collection: { [keys: string]: T }, key: string): T | null {
if (hasOwnProperty.call(collection, key)) {
return collection[key];
}
return null;
}
export function insert<T>(collection: { [keys: string]: T }, key: string, value: T): void {
collection[key] = value;
}
export function lookupOrInsert<T>(collection: { [keys: string]: T }, key: string, value: T): T {
if (hasOwnProperty.call(collection, key)) {
return collection[key];
} else {
collection[key] = value;
return value;
}
}
export function forEach<T>(collection: { [keys: string]: T }, callback: (entry: { key: string; value: T }) => void): void {
for (const key in collection) {
if (hasOwnProperty.call(collection, key)) {
callback({
key: key,
value: collection[key]
});
}
}
}
export function contains(collection: { [keys: string]: any }, key: string): boolean {
return hasOwnProperty.call(collection, key);
}
}
export module strings {
/**
* The empty string. The one and only.
*/
export const empty = '';
export const eolUnix = '\n';
export function format(value: string, ...rest: any[]): string {
return value.replace(/({\d+})/g, function (match) {
const index = Number(match.substring(1, match.length - 1));
return String(rest[index]) || match;
});
}
}
export module graph {
export interface Node<T> {
data: T;
incoming: { [key: string]: Node<T> };
outgoing: { [key: string]: Node<T> };
}
export function newNode<T>(data: T): Node<T> {
return {
data: data,
incoming: {},
outgoing: {}
};
}
export class Graph<T> {
private _nodes: { [key: string]: Node<T> } = {};
constructor(private _hashFn: (element: T) => string) {
// empty
}
traverse(start: T, inwards: boolean, callback: (data: T) => void): void {
const startNode = this.lookup(start);
if (!startNode) {
return;
}
this._traverse(startNode, inwards, {}, callback);
}
private _traverse(node: Node<T>, inwards: boolean, seen: { [key: string]: boolean }, callback: (data: T) => void): void {
const key = this._hashFn(node.data);
if (collections.contains(seen, key)) {
return;
}
seen[key] = true;
callback(node.data);
const nodes = inwards ? node.outgoing : node.incoming;
collections.forEach(nodes, (entry) => this._traverse(entry.value, inwards, seen, callback));
}
inertEdge(from: T, to: T): void {
const fromNode = this.lookupOrInsertNode(from);
const toNode = this.lookupOrInsertNode(to);
fromNode.outgoing[this._hashFn(to)] = toNode;
toNode.incoming[this._hashFn(from)] = fromNode;
}
removeNode(data: T): void {
const key = this._hashFn(data);
delete this._nodes[key];
collections.forEach(this._nodes, (entry) => {
delete entry.value.outgoing[key];
delete entry.value.incoming[key];
});
}
lookupOrInsertNode(data: T): Node<T> {
const key = this._hashFn(data);
let node = collections.lookup(this._nodes, key);
if (!node) {
node = newNode(data);
this._nodes[key] = node;
}
return node;
}
lookup(data: T): Node<T> | null {
return collections.lookup(this._nodes, this._hashFn(data));
}
}
}
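
graph.Graph above is the dependency graph the builder's LanguageServiceHost maintains: inertEdge(from, to) records that from depends on to, collectDependents walks the incoming edges, and traverse(start, true, cb) follows the outgoing ones. A small sketch with made-up file names:

import { graph } from './utils';

const deps = new graph.Graph<string>(s => s);
deps.inertEdge('a.ts', 'b.ts'); // a.ts imports b.ts
deps.inertEdge('b.ts', 'c.ts'); // b.ts imports c.ts

// Everything a.ts (transitively) depends on, including itself.
deps.traverse('a.ts', true, file => console.log(file)); // a.ts, b.ts, c.ts

// Direct dependents of c.ts (who imports it), via incoming edges.
const node = deps.lookup('c.ts');
if (node) {
	Object.keys(node.incoming).forEach(key => console.log(key)); // b.ts
}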

View File

@@ -1,18 +0,0 @@
declare module "gulp-tsb" {
export interface ICancellationToken {
isCancellationRequested(): boolean;
}
export interface IncrementalCompiler {
(token?: ICancellationToken): NodeJS.ReadWriteStream;
src(opts?: {
cwd?: string;
base?: string;
}): NodeJS.ReadStream;
}
export function create(projectPath: string, existingOptions: any, verbose?: boolean, onError?: (message: any) => void): IncrementalCompiler;
}

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.buildWebNodePaths = exports.createExternalLoaderConfig = exports.acquireWebNodePaths = exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.debounce = exports.incremental = void 0;
const es = require("event-stream");
@@ -240,7 +240,7 @@ function _rreaddir(dirPath, prepend, result) {
}
}
function rreddir(dirPath) {
let result = [];
const result = [];
_rreaddir(dirPath, '', result);
return result;
}
@@ -337,6 +337,13 @@ function acquireWebNodePaths() {
}
nodePaths[key] = entryPoint;
}
// @TODO lramos15: can we make this dynamic like the rest of the node paths?
// Add these paths as well for the 1DS SDK dependencies.
// It is not clear why, but when the 1DS entry point requires these modules,
// they are not fetched from the right location and are instead fetched from out/
nodePaths['@microsoft/dynamicproto-js'] = 'lib/dist/umd/dynamicproto-js.min.js';
nodePaths['@microsoft/applicationinsights-shims'] = 'dist/umd/applicationinsights-shims.min.js';
nodePaths['@microsoft/applicationinsights-core-js'] = 'browser/applicationinsights-core-js.min.js';
return nodePaths;
}
exports.acquireWebNodePaths = acquireWebNodePaths;
@@ -345,7 +352,7 @@ function createExternalLoaderConfig(webEndpoint, commit, quality) {
return undefined;
}
webEndpoint = webEndpoint + `/${quality}/${commit}`;
let nodePaths = acquireWebNodePaths();
const nodePaths = acquireWebNodePaths();
Object.keys(nodePaths).map(function (key, _) {
nodePaths[key] = `${webEndpoint}/node_modules/${key}/${nodePaths[key]}`;
});

View File

@@ -3,8 +3,6 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as es from 'event-stream';
import _debounce = require('debounce');
import * as _filter from 'gulp-filter';
@@ -306,7 +304,7 @@ function _rreaddir(dirPath: string, prepend: string, result: string[]): void {
}
export function rreddir(dirPath: string): string[] {
let result: string[] = [];
const result: string[] = [];
_rreaddir(dirPath, '', result);
return result;
}
@@ -414,6 +412,13 @@ export function acquireWebNodePaths() {
nodePaths[key] = entryPoint;
}
// @TODO lramos15: can we make this dynamic like the rest of the node paths?
// Add these paths as well for the 1DS SDK dependencies.
// It is not clear why, but when the 1DS entry point requires these modules,
// they are not fetched from the right location and are instead fetched from out/
nodePaths['@microsoft/dynamicproto-js'] = 'lib/dist/umd/dynamicproto-js.min.js';
nodePaths['@microsoft/applicationinsights-shims'] = 'dist/umd/applicationinsights-shims.min.js';
nodePaths['@microsoft/applicationinsights-core-js'] = 'browser/applicationinsights-core-js.min.js';
return nodePaths;
}
@@ -422,7 +427,7 @@ export function createExternalLoaderConfig(webEndpoint?: string, commit?: string
return undefined;
}
webEndpoint = webEndpoint + `/${quality}/${commit}`;
let nodePaths = acquireWebNodePaths();
const nodePaths = acquireWebNodePaths();
Object.keys(nodePaths).map(function (key, _) {
nodePaths[key] = `${webEndpoint}/node_modules/${key}/${nodePaths[key]}`;
});
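The effect of the path rewrite above, shown in isolation; the endpoint, commit, and quality values are made up, and the full shape of the returned loader config is not visible in this hunk:

// Hypothetical inputs, for illustration only
const endpoint = 'https://example-cdn.net';
const base = `${endpoint}/stable/abc123`; // webEndpoint + `/${quality}/${commit}`

const paths: { [key: string]: string } = {
	'@microsoft/applicationinsights-shims': 'dist/umd/applicationinsights-shims.min.js'
};
Object.keys(paths).forEach(key => {
	paths[key] = `${base}/node_modules/${key}/${paths[key]}`;
});
// paths['@microsoft/applicationinsights-shims'] ===
//   'https://example-cdn.net/stable/abc123/node_modules/@microsoft/applicationinsights-shims/dist/umd/applicationinsights-shims.min.js'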

View File

@@ -1 +0,0 @@
.yarnrc

View File

@@ -1,12 +0,0 @@
{
"name": "watch",
"version": "1.0.0",
"description": "",
"author": "Microsoft ",
"private": true,
"license": "MIT",
"devDependencies": {},
"dependencies": {
"vscode-gulp-watch": "^5.0.3"
}
}

View File

@@ -1,400 +0,0 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1
ansi-colors@4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.1.tgz#cbb9ae256bf750af1eab344f229aa27fe94ba348"
integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA==
ansi-colors@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-1.1.0.tgz#6374b4dd5d4718ff3ce27a671a3b1cad077132a9"
integrity sha512-SFKX67auSNoVR38N3L+nvsPjOE0bybKTYbkf5tRvushrAPQ9V75huw0ZxBkKVeRU9kqH3d6HA4xTckbwZ4ixmA==
dependencies:
ansi-wrap "^0.1.0"
ansi-gray@^0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/ansi-gray/-/ansi-gray-0.1.1.tgz#2962cf54ec9792c48510a3deb524436861ef7251"
integrity sha1-KWLPVOyXksSFEKPetSRDaGHvclE=
dependencies:
ansi-wrap "0.1.0"
ansi-wrap@0.1.0, ansi-wrap@^0.1.0:
version "0.1.0"
resolved "https://registry.yarnpkg.com/ansi-wrap/-/ansi-wrap-0.1.0.tgz#a82250ddb0015e9a27ca82e82ea603bbfa45efaf"
integrity sha1-qCJQ3bABXponyoLoLqYDu/pF768=
anymatch@^3.1.1, anymatch@~3.1.1:
version "3.1.1"
resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142"
integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg==
dependencies:
normalize-path "^3.0.0"
picomatch "^2.0.4"
arr-diff@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520"
integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA=
arr-union@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4"
integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ=
assign-symbols@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367"
integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c=
binary-extensions@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.0.0.tgz#23c0df14f6a88077f5f986c0d167ec03c3d5537c"
integrity sha512-Phlt0plgpIIBOGTT/ehfFnbNlfsDEiqmzE2KRXoX1bLIlir4X/MR+zSyBEkL05ffWgnRSf/DXv+WrUAVr93/ow==
braces@~3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107"
integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==
dependencies:
fill-range "^7.0.1"
chokidar@3.5.1:
version "3.5.1"
resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a"
integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw==
dependencies:
anymatch "~3.1.1"
braces "~3.0.2"
glob-parent "~5.1.0"
is-binary-path "~2.1.0"
is-glob "~4.0.1"
normalize-path "~3.0.0"
readdirp "~3.5.0"
optionalDependencies:
fsevents "~2.3.1"
clone-buffer@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/clone-buffer/-/clone-buffer-1.0.0.tgz#e3e25b207ac4e701af721e2cb5a16792cac3dc58"
integrity sha1-4+JbIHrE5wGvch4staFnksrD3Fg=
clone-stats@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/clone-stats/-/clone-stats-1.0.0.tgz#b3782dff8bb5474e18b9b6bf0fdfe782f8777680"
integrity sha1-s3gt/4u1R04Yuba/D9/ngvh3doA=
clone@^2.1.1:
version "2.1.2"
resolved "https://registry.yarnpkg.com/clone/-/clone-2.1.2.tgz#1b7f4b9f591f1e8f83670401600345a02887435f"
integrity sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=
cloneable-readable@^1.0.0:
version "1.1.3"
resolved "https://registry.yarnpkg.com/cloneable-readable/-/cloneable-readable-1.1.3.tgz#120a00cb053bfb63a222e709f9683ea2e11d8cec"
integrity sha512-2EF8zTQOxYq70Y4XKtorQupqF0m49MBz2/yf5Bj+MHjvpG3Hy7sImifnqD6UA+TKYxeSV+u6qqQPawN5UvnpKQ==
dependencies:
inherits "^2.0.1"
process-nextick-args "^2.0.0"
readable-stream "^2.3.5"
color-support@^1.1.3:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-support/-/color-support-1.1.3.tgz#93834379a1cc9a0c61f82f52f0d04322251bd5a2"
integrity sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==
core-util-is@~1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7"
integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=
extend-shallow@^3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8"
integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg=
dependencies:
assign-symbols "^1.0.0"
is-extendable "^1.0.1"
fancy-log@^1.3.3:
version "1.3.3"
resolved "https://registry.yarnpkg.com/fancy-log/-/fancy-log-1.3.3.tgz#dbc19154f558690150a23953a0adbd035be45fc7"
integrity sha512-k9oEhlyc0FrVh25qYuSELjr8oxsCoc4/LEZfg2iJJrfEk/tZL9bCoJE47gqAvI2m/AUjluCS4+3I0eTx8n3AEw==
dependencies:
ansi-gray "^0.1.1"
color-support "^1.1.3"
parse-node-version "^1.0.0"
time-stamp "^1.0.0"
fill-range@^7.0.1:
version "7.0.1"
resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40"
integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==
dependencies:
to-regex-range "^5.0.1"
first-chunk-stream@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/first-chunk-stream/-/first-chunk-stream-2.0.0.tgz#1bdecdb8e083c0664b91945581577a43a9f31d70"
integrity sha1-G97NuOCDwGZLkZRVgVd6Q6nzHXA=
dependencies:
readable-stream "^2.0.2"
fsevents@~2.3.1:
version "2.3.1"
resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.1.tgz#b209ab14c61012636c8863507edf7fb68cc54e9f"
integrity sha512-YR47Eg4hChJGAB1O3yEAOkGO+rlzutoICGqGo9EZ4lKWokzZRSyIW1QmTzqjtw8MJdj9srP869CuWw/hyzSiBw==
glob-parent@^5.1.1, glob-parent@~5.1.0:
version "5.1.2"
resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4"
integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==
dependencies:
is-glob "^4.0.1"
graceful-fs@^4.1.2:
version "4.2.3"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423"
integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==
inherits@^2.0.1, inherits@^2.0.3, inherits@~2.0.3:
version "2.0.4"
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c"
integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==
is-binary-path@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09"
integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==
dependencies:
binary-extensions "^2.0.0"
is-extendable@^1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4"
integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA==
dependencies:
is-plain-object "^2.0.4"
is-extglob@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2"
integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=
is-glob@^4.0.1, is-glob@~4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc"
integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==
dependencies:
is-extglob "^2.1.1"
is-number@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
is-plain-object@^2.0.4:
version "2.0.4"
resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677"
integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og==
dependencies:
isobject "^3.0.1"
is-utf8@^0.2.0, is-utf8@^0.2.1:
version "0.2.1"
resolved "https://registry.yarnpkg.com/is-utf8/-/is-utf8-0.2.1.tgz#4b0da1442104d1b336340e80797e865cf39f7d72"
integrity sha1-Sw2hRCEE0bM2NA6AeX6GXPOffXI=
isarray@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11"
integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE=
isobject@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8=
normalize-path@^3.0.0, normalize-path@~3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65"
integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==
object-assign@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863"
integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=
parse-node-version@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/parse-node-version/-/parse-node-version-1.0.1.tgz#e2b5dbede00e7fa9bc363607f53327e8b073189b"
integrity sha512-3YHlOa/JgH6Mnpr05jP9eDG254US9ek25LyIxZlDItp2iJtwyaXQb57lBYLdT3MowkUFYEV2XXNAYIPlESvJlA==
picomatch@^2.0.4, picomatch@^2.2.1:
version "2.2.2"
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
pify@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
plugin-error@1.0.1:
version "1.0.1"
resolved "https://registry.yarnpkg.com/plugin-error/-/plugin-error-1.0.1.tgz#77016bd8919d0ac377fdcdd0322328953ca5781c"
integrity sha512-L1zP0dk7vGweZME2i+EeakvUNqSrdiI3F91TwEoYiGrAfUXmVv6fJIq4g82PAXxNsWOp0J7ZqQy/3Szz0ajTxA==
dependencies:
ansi-colors "^1.0.1"
arr-diff "^4.0.0"
arr-union "^3.1.0"
extend-shallow "^3.0.2"
process-nextick-args@^2.0.0, process-nextick-args@~2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2"
integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==
readable-stream@^2.0.2, readable-stream@^2.3.5:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
dependencies:
core-util-is "~1.0.0"
inherits "~2.0.3"
isarray "~1.0.0"
process-nextick-args "~2.0.0"
safe-buffer "~5.1.1"
string_decoder "~1.1.1"
util-deprecate "~1.0.1"
readable-stream@^3.6.0:
version "3.6.0"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.0.tgz#337bbda3adc0706bd3e024426a286d4b4b2c9198"
integrity sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==
dependencies:
inherits "^2.0.3"
string_decoder "^1.1.1"
util-deprecate "^1.0.1"
readdirp@~3.5.0:
version "3.5.0"
resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e"
integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ==
dependencies:
picomatch "^2.2.1"
remove-trailing-separator@^1.0.1:
version "1.1.0"
resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8=
replace-ext@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/replace-ext/-/replace-ext-1.0.0.tgz#de63128373fcbf7c3ccfa4de5a480c45a67958eb"
integrity sha1-3mMSg3P8v3w8z6TeWkgMRaZ5WOs=
safe-buffer@~5.1.0, safe-buffer@~5.1.1:
version "5.1.2"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==
string_decoder@^1.1.1:
version "1.3.0"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.3.0.tgz#42f114594a46cf1a8e30b0a84f56c78c3edac21e"
integrity sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==
dependencies:
safe-buffer "~5.2.0"
string_decoder@~1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8"
integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==
dependencies:
safe-buffer "~5.1.0"
strip-bom-buf@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-bom-buf/-/strip-bom-buf-1.0.0.tgz#1cb45aaf57530f4caf86c7f75179d2c9a51dd572"
integrity sha1-HLRar1dTD0yvhsf3UXnSyaUd1XI=
dependencies:
is-utf8 "^0.2.1"
strip-bom-stream@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/strip-bom-stream/-/strip-bom-stream-2.0.0.tgz#f87db5ef2613f6968aa545abfe1ec728b6a829ca"
integrity sha1-+H217yYT9paKpUWr/h7HKLaoKco=
dependencies:
first-chunk-stream "^2.0.0"
strip-bom "^2.0.0"
strip-bom@^2.0.0:
version "2.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e"
integrity sha1-YhmoVhZSBJHzV4i9vxRHqZx+aw4=
dependencies:
is-utf8 "^0.2.0"
time-stamp@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/time-stamp/-/time-stamp-1.1.0.tgz#764a5a11af50561921b133f3b44e618687e0f5c3"
integrity sha1-dkpaEa9QVhkhsTPztE5hhofg9cM=
to-regex-range@^5.0.1:
version "5.0.1"
resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
dependencies:
is-number "^7.0.0"
util-deprecate@^1.0.1, util-deprecate@~1.0.1:
version "1.0.2"
resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf"
integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8=
vinyl-file@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/vinyl-file/-/vinyl-file-3.0.0.tgz#b104d9e4409ffa325faadd520642d0a3b488b365"
integrity sha1-sQTZ5ECf+jJfqt1SBkLQo7SIs2U=
dependencies:
graceful-fs "^4.1.2"
pify "^2.3.0"
strip-bom-buf "^1.0.0"
strip-bom-stream "^2.0.0"
vinyl "^2.0.1"
vinyl@^2.0.1, vinyl@^2.2.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/vinyl/-/vinyl-2.2.1.tgz#23cfb8bbab5ece3803aa2c0a1eb28af7cbba1974"
integrity sha512-LII3bXRFBZLlezoG5FfZVcXflZgWP/4dCwKtxd5ky9+LOtM4CS3bIRQsmR1KMnMW07jpE8fqR2lcxPZ+8sJIcw==
dependencies:
clone "^2.1.1"
clone-buffer "^1.0.0"
clone-stats "^1.0.0"
cloneable-readable "^1.0.0"
remove-trailing-separator "^1.0.1"
replace-ext "^1.0.0"
vscode-gulp-watch@^5.0.3:
version "5.0.3"
resolved "https://registry.yarnpkg.com/vscode-gulp-watch/-/vscode-gulp-watch-5.0.3.tgz#1ca1c03581d43692ecb1fe0b9afd4256faeb701b"
integrity sha512-MTUp2yLE9CshhkNSNV58EQNxQSeF8lIj3mkXZX9a1vAk+EQNM2PAYdPUDSd/P/08W3PMHGznEiZyfK7JAjLosg==
dependencies:
ansi-colors "4.1.1"
anymatch "^3.1.1"
chokidar "3.5.1"
fancy-log "^1.3.3"
glob-parent "^5.1.1"
normalize-path "^3.0.0"
object-assign "^4.1.1"
plugin-error "1.0.1"
readable-stream "^3.6.0"
vinyl "^2.2.0"
vinyl-file "^3.0.0"