Fixes build folder compilation + enable linux .deb files (#23006)

Author: Cheena Malhotra
Date: 2023-05-08 13:40:53 -07:00
Committed by: GitHub
Parent: 9415db87a2
Commit: ea39622e81
57 changed files with 1212 additions and 2101 deletions

View File

@@ -45,8 +45,7 @@ function isUpToDate(extension) {
}
}
function syncMarketplaceExtension(extension) {
var _a;
const galleryServiceUrl = (_a = productjson.extensionsGallery) === null || _a === void 0 ? void 0 : _a.serviceUrl;
const galleryServiceUrl = productjson.extensionsGallery?.serviceUrl;
const source = ansiColors.blue(galleryServiceUrl ? '[marketplace]' : '[github]');
if (isUpToDate(extension)) {
log(source, `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
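Most hunks in this commit appear to be regenerated compiler output for the checked-in build tooling rather than hand edits: the deleted lines are the older downlevelled form of optional chaining, and the replacements keep the native ?. operator, presumably because the emit target was raised. A minimal standalone sketch of the equivalence (the Product shape is illustrative, not the repo's):

interface Product {
    extensionsGallery?: { serviceUrl?: string };
}
declare const productjson: Product;

// What the source expresses:
const galleryServiceUrl = productjson.extensionsGallery?.serviceUrl;

// What an older compile target downlevelled it to (the form removed above):
//   var _a;
//   (_a = productjson.extensionsGallery) === null || _a === void 0 ? void 0 : _a.serviceUrl;
// Both yield undefined when extensionsGallery is absent, otherwise its serviceUrl.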

View File

@@ -18,7 +18,6 @@ const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'
const contentBasePath = 'raw.githubusercontent.com';
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
async function downloadExtensionDetails(extension) {
var _a, _b, _c;
const extensionLabel = `${extension.name}@${extension.version}`;
const repository = url.parse(extension.repo).path.substr(1);
const repositoryContentBaseUrl = `https://${token ? `${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`;
@@ -56,11 +55,11 @@ async function downloadExtensionDetails(extension) {
}
}
// Validation
if (!((_a = results.find(r => r.fileName === 'package.json')) === null || _a === void 0 ? void 0 : _a.body)) {
if (!results.find(r => r.fileName === 'package.json')?.body) {
// throw new Error(`The "package.json" file could not be found for the built-in extension - ${extensionLabel}`);
}
if (!((_b = results.find(r => r.fileName === 'package-lock.json')) === null || _b === void 0 ? void 0 : _b.body) &&
!((_c = results.find(r => r.fileName === 'yarn.lock')) === null || _c === void 0 ? void 0 : _c.body)) {
if (!results.find(r => r.fileName === 'package-lock.json')?.body &&
!results.find(r => r.fileName === 'yarn.lock')?.body) {
// throw new Error(`The "package-lock.json"/"yarn.lock" could not be found for the built-in extension - ${extensionLabel}`);
}
}

View File

@@ -21,12 +21,11 @@ function bundle(entryPoints, config, callback) {
});
const allMentionedModulesMap = {};
entryPoints.forEach((module) => {
var _a, _b;
allMentionedModulesMap[module.name] = true;
(_a = module.include) === null || _a === void 0 ? void 0 : _a.forEach(function (includedModule) {
module.include?.forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
});
(_b = module.exclude) === null || _b === void 0 ? void 0 : _b.forEach(function (excludedModule) {
module.exclude?.forEach(function (excludedModule) {
allMentionedModulesMap[excludedModule] = true;
});
});

View File

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.watchApiProposalNamesTask = exports.compileApiProposalNamesTask = exports.watchTask = exports.compileTask = exports.transpileTask = void 0;
const es = require("event-stream");
@@ -38,7 +38,7 @@ function createCompile(src, build, emitError, transpileOnly) {
const tsb = require('./tsb');
const sourcemaps = require('gulp-sourcemaps');
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
// {{SQL CARBON EDIT}} Add override for not inlining the sourcemap during build so we can get code coverage - it
// currently expects a *.map.js file to exist next to the source file for proper source mapping
if (!build && !process.env['SQL_NO_INLINE_SOURCEMAP']) {
@@ -52,7 +52,7 @@ function createCompile(src, build, emitError, transpileOnly) {
console.warn('* and re-run the build/watch task *');
console.warn('********************************************************************************************');
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
const compilation = tsb.create(projectPath, overrideOptions, { verbose: false, transpileOnly }, err => reporter(err));
function pipeline(token) {
const bom = require('gulp-bom');
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));

View File

@@ -60,7 +60,7 @@ function createCompile(src: string, build: boolean, emitError: boolean, transpil
}
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
const compilation = tsb.create(projectPath, overrideOptions, { verbose: false, transpileOnly }, err => reporter(err));
function pipeline(token?: util.ICancellationToken) {
const bom = require('gulp-bom') as typeof import('gulp-bom');
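Beyond the regenerated output, this hunk changes the tsb.create call itself in both the compiled file and the TypeScript source: the bare verbose boolean becomes an options object that also carries transpileOnly, presumably so the transpile-only task can flow through the same createCompile path. A sketch of what that option bag plausibly looks like; the real interface lives in build/lib/tsb and is assumed here, not quoted:

// Hypothetical option bag; the actual interface may carry more fields.
interface ITsbOptions {
    verbose?: boolean;
    transpileOnly?: boolean;
}

// Old call shape: the third argument was a bare verbosity flag.
//   tsb.create(projectPath, overrideOptions, false, err => reporter(err));
// New call shape: both flags travel together in one object.
//   tsb.create(projectPath, overrideOptions, { verbose: false, transpileOnly }, err => reporter(err));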

View File

@@ -36,7 +36,7 @@ function asYarnDependency(prefix, tree) {
return { name, version, path: dependencyPath, children };
}
function getYarnProductionDependencies(cwd) {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: Object.assign(Object.assign({}, process.env), { NODE_ENV: 'production' }), stdio: [null, null, 'inherit'] });
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
const match = /^{"type":"tree".*$/m.exec(raw);
if (!match || match.length !== 1) {
throw new Error('Could not parse result of `yarn list --json`');
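The Object.assign chains get the same treatment, rewritten as native spread syntax once the target supports it. A two-line sketch of the equivalence for the yarn environment above (process.env is the Node global):

// Older emit (the form being removed):
const envOld = Object.assign(Object.assign({}, process.env), { NODE_ENV: 'production' });
// Newer emit, identical result: copy process.env, then override NODE_ENV.
const envNew = { ...process.env, NODE_ENV: 'production' };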

View File

@@ -40,7 +40,7 @@ const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSyn
function darwinBundleDocumentType(extensions, icon, nameOrSuffix, utis) {
// If given a suffix, generate a name from it. If not given anything, default to 'document'
if (isDocumentSuffix(nameOrSuffix) || !nameOrSuffix) {
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix !== null && nameOrSuffix !== void 0 ? nameOrSuffix : 'document');
nameOrSuffix = icon.charAt(0).toUpperCase() + icon.slice(1) + ' ' + (nameOrSuffix ?? 'document');
}
return {
name: nameOrSuffix,

View File

@@ -20,8 +20,7 @@ module.exports = {
return {
// /.../
['Literal[regex]']: (node) => {
var _a;
const pattern = (_a = node.regex) === null || _a === void 0 ? void 0 : _a.pattern;
const pattern = node.regex?.pattern;
if (_containsLookBehind(pattern)) {
context.report({
node,

View File

@@ -14,9 +14,8 @@ module.exports = new class ApiLiteralOrTypes {
create(context) {
return {
['TSDeclareFunction Identifier[name=/create.*/]']: (node) => {
var _a;
const decl = node.parent;
if (((_a = decl.returnType) === null || _a === void 0 ? void 0 : _a.typeAnnotation.type) !== experimental_utils_1.AST_NODE_TYPES.TSTypeReference) {
if (decl.returnType?.typeAnnotation.type !== experimental_utils_1.AST_NODE_TYPES.TSTypeReference) {
return;
}
if (decl.returnType.typeAnnotation.typeName.type !== experimental_utils_1.AST_NODE_TYPES.Identifier) {

View File

@@ -25,8 +25,7 @@ module.exports = new (_a = class ApiEventNaming {
const verbs = new Set(config.verbs);
return {
['TSTypeAnnotation TSTypeReference Identifier[name="Event"]']: (node) => {
var _a, _b;
const def = (_b = (_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent) === null || _b === void 0 ? void 0 : _b.parent;
const def = node.parent?.parent?.parent;
const ident = this.getIdent(def);
if (!ident) {
// event on unknown structure...

View File

@@ -17,8 +17,7 @@ module.exports = new (_a = class ApiProviderNaming {
const allowed = new Set(config.allowed);
return {
['TSInterfaceDeclaration[id.name=/.+Provider/] TSMethodSignature']: (node) => {
var _a;
const interfaceName = ((_a = node.parent) === null || _a === void 0 ? void 0 : _a.parent).id.name;
const interfaceName = (node.parent?.parent).id.name;
if (allowed.has(interfaceName)) {
// allowed
return;

View File

@@ -34,14 +34,14 @@ function minifyExtensionResources(input) {
.pipe(jsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
const errors = [];
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
if (errors.length === 0) {
// file parsed OK => just stringify to drop whitespace and comments
f.contents = Buffer.from(JSON.stringify(value));
}
return f;
}))
const errors = [];
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
if (errors.length === 0) {
// file parsed OK => just stringify to drop whitespace and comments
f.contents = Buffer.from(JSON.stringify(value));
}
return f;
}))
.pipe(jsonFilter.restore);
}
function updateExtensionPackageJSON(input, update) {
@@ -50,10 +50,10 @@ function updateExtensionPackageJSON(input, update) {
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
const data = JSON.parse(f.contents.toString('utf8'));
f.contents = Buffer.from(JSON.stringify(update(data)));
return f;
}))
const data = JSON.parse(f.contents.toString('utf8'));
f.contents = Buffer.from(JSON.stringify(update(data)));
return f;
}))
.pipe(packageJsonFilter.restore);
}
function fromLocal(extensionPath, forWeb) {
@@ -95,11 +95,11 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
// check for a webpack configuration files, then invoke webpack
// and merge its output with the files stream.
const webpackConfigLocations = glob.sync(path.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] });
@@ -119,24 +119,27 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
};
const exportedConfig = require(webpackConfigPath);
return (Array.isArray(exportedConfig) ? exportedConfig : [exportedConfig]).map(config => {
const webpackConfig = Object.assign(Object.assign({}, config), { mode: 'production' });
const webpackConfig = {
...config,
...{ mode: 'production' }
};
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
});
});
es.merge(...webpackStreams, es.readArray(files))
@@ -158,16 +161,16 @@ function fromLocalNormal(extensionPath) {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
}
@@ -209,7 +212,10 @@ const ghApiHeaders = {
if (process.env.GITHUB_TOKEN) {
ghApiHeaders.Authorization = 'Basic ' + Buffer.from(process.env.GITHUB_TOKEN).toString('base64');
}
const ghDownloadHeaders = Object.assign(Object.assign({}, ghApiHeaders), { Accept: 'application/octet-stream' });
const ghDownloadHeaders = {
...ghApiHeaders,
Accept: 'application/octet-stream',
};
function fromGithub({ name, version, repo, metadata }) {
const remote = require('gulp-remote-retry-src');
const json = require('gulp-json-editor');
@@ -244,6 +250,7 @@ const excludedExtensions = [
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
'vscode-custom-editor-tests',
'vscode-notebook-tests',
'integration-tests', // {{SQL CARBON EDIT}}
];
// {{SQL CARBON EDIT}}
@@ -258,7 +265,6 @@ const externalExtensions = [
'arc',
'asde-deployment',
'azcli',
'azurehybridtoolkit',
'azuremonitor',
'cms',
'dacpac',
@@ -324,11 +330,11 @@ function isWebExtension(manifest) {
function packageLocalExtensionsStream(forWeb) {
const localExtensionsDescriptions = (glob.sync('extensions/*/package.json')
.map(manifestPath => {
const absoluteManifestPath = path.join(root, manifestPath);
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
const absoluteManifestPath = path.join(root, manifestPath);
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package
@@ -359,15 +365,15 @@ function packageMarketplaceExtensionsStream(forWeb, galleryServiceUrl) {
];
const marketplaceExtensionsStream = minifyExtensionResources(es.merge(...marketplaceExtensionsDescriptions
.map(extension => {
const input = (galleryServiceUrl ? fromMarketplace(galleryServiceUrl, extension) : fromGithub(extension))
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data) => {
delete data.scripts;
delete data.dependencies;
delete data.devDependencies;
return data;
});
})));
const input = (galleryServiceUrl ? fromMarketplace(galleryServiceUrl, extension) : fromGithub(extension))
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data) => {
delete data.scripts;
delete data.dependencies;
delete data.devDependencies;
return data;
});
})));
return (marketplaceExtensionsStream
.pipe(util2.setExecutableBit(['**/*.sh'])));
}
@@ -412,10 +418,10 @@ exports.scanBuiltinExtensions = scanBuiltinExtensions;
function packageExternalExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => externalExtensions.indexOf(name) >= 0 || exports.vscodeExternalExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path, false)
@@ -433,10 +439,10 @@ exports.cleanRebuildExtensions = cleanRebuildExtensions;
function packageRebuildExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path, false)
@@ -530,7 +536,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject();
}
else {
reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
reporter(stats?.toJson());
}
});
}
@@ -541,7 +547,7 @@ async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
reject();
}
else {
reporter(stats === null || stats === void 0 ? void 0 : stats.toJson());
reporter(stats?.toJson());
resolve();
}
});
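The extension minification step near the top of this file relies on jsonc-parser's tolerant parse, which collects problems in an errors array rather than throwing, so malformed files pass through unmodified. A small self-contained sketch of that API as published by the jsonc-parser package:

import { parse, ParseError } from 'jsonc-parser';

const errors: ParseError[] = [];
// Tolerant parse: comments are accepted, and any syntax problems land in `errors` instead of throwing.
const value = parse('{ "name": "example" /* comment */ }', errors);
if (errors.length === 0) {
    // Safe to re-serialize, dropping whitespace and comments, as the gulp task does.
    console.log(JSON.stringify(value));
}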

View File

@@ -1012,7 +1012,7 @@ function prepareI18nFiles() {
}
exports.prepareI18nFiles = prepareI18nFiles;
function createI18nFile(originalFilePath, messages) {
const result = Object.create(null);
let result = Object.create(null);
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -1035,16 +1035,16 @@ function createI18nFile(originalFilePath, messages) {
exports.createI18nFile = createI18nFile;
exports.i18nPackVersion = '1.0.0'; // {{SQL CARBON EDIT}} Needed in locfunc.
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
let parsePromises = [];
let mainPack = { version: exports.i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
const parsePromises = [];
const mainPack = { version: exports.i18nPackVersion, contents: {} };
const extensionsPacks = {};
const errors = [];
return (0, event_stream_1.through)(function (xlf) {
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
const project = path.basename(path.dirname(path.dirname(xlf.relative)));
const resource = path.basename(xlf.relative, '.xlf');
const contents = xlf.contents.toString();
log(`Found ${project}: ${resource}`);
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
const parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {

View File

@@ -113,7 +113,7 @@ const RULES = [
},
// Common: vs/platform/native/common/native.ts
{
target: '**/vs/platform/native/common/native.ts',
target: '**/{vs,sql}/platform/native/common/native.ts',
allowedTypes: CORE_TYPES,
disallowedTypes: [ /* Ignore native types that are defined from here */],
disallowedDefinitions: [
@@ -209,7 +209,6 @@ let hasErrors = false;
function checkFile(program, sourceFile, rule) {
checkNode(sourceFile);
function checkNode(node) {
var _a, _b;
if (node.kind !== ts.SyntaxKind.Identifier) {
return ts.forEachChild(node, checkNode); // recurse down
}
@@ -224,10 +223,10 @@ function checkFile(program, sourceFile, rule) {
}
const parentSymbol = _parentSymbol;
const text = parentSymbol.getName();
if ((_a = rule.allowedTypes) === null || _a === void 0 ? void 0 : _a.some(allowed => allowed === text)) {
if (rule.allowedTypes?.some(allowed => allowed === text)) {
return; // override
}
if ((_b = rule.disallowedTypes) === null || _b === void 0 ? void 0 : _b.some(disallowed => disallowed === text)) {
if (rule.disallowedTypes?.some(disallowed => disallowed === text)) {
const { line, character } = sourceFile.getLineAndCharacterOfPosition(node.getStart());
console.log(`[build/lib/layersChecker.ts]: Reference to type '${text}' violates layer '${rule.target}' (${sourceFile.fileName} (${line + 1},${character + 1})`);
hasErrors = true;

View File

@@ -77,6 +77,12 @@ const RULES: IRule[] = [
skip: true // -> skip all test files
},
// TODO@bpasero remove me once electron utility process has landed
{
target: '**/vs/workbench/services/extensions/electron-sandbox/nativeLocalProcessExtensionHost.ts',
skip: true
},
// Common: vs/base/common/platform.ts
{
target: '**/{vs,sql}/base/common/platform.ts',
@@ -117,7 +123,7 @@ const RULES: IRule[] = [
// Common: vs/platform/native/common/native.ts
{
target: '**/vs/platform/native/common/native.ts',
target: '**/{vs,sql}/platform/native/common/native.ts',
allowedTypes: CORE_TYPES,
disallowedTypes: [/* Ignore native types that are defined from here */],
disallowedDefinitions: [

View File

@@ -71,7 +71,7 @@ function updateMainI18nFile(existingTranslationFilePath, originalFilePath, messa
delete objectContents[`${contentKey}`];
}
}
messages.contents = Object.assign(Object.assign({}, objectContents), messages.contents);
messages.contents = { ...objectContents, ...messages.contents };
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -141,9 +141,7 @@ function modifyI18nPackFiles(existingTranslationFolder, resultingTranslationPath
this.queue(translatedExtFile);
// exclude altered vscode extensions from having a new path even if we provide a new I18n file.
if (alteredVSCodeExtensions.indexOf(extension) === -1) {
//handle edge case for 'Microsoft.sqlservernotebook' where extension name is the same as extension ID.
//(Other extensions need to have publisher appended in front as their ID.)
let adsExtensionId = (extension === 'Microsoft.sqlservernotebook') ? extension : 'Microsoft.' + extension;
let adsExtensionId = 'Microsoft.' + extension;
resultingTranslationPaths.push({ id: adsExtensionId, resourceName: `extensions/${extension}.i18n.json` });
}
}
@@ -248,7 +246,7 @@ function refreshLangpacks() {
try {
fs.statSync(locExtFolder);
}
catch (_a) {
catch {
console.log('Language is not included in ADS yet: ' + langId);
continue;
}
@@ -311,7 +309,7 @@ function refreshLangpacks() {
}
fs.statSync(path.join(translationDataFolder, curr.path.replace('./translations', '')));
}
catch (_a) {
catch {
nonExistantExtensions.push(curr);
}
}
@@ -365,7 +363,7 @@ function renameVscodeLangpacks() {
try {
fs.statSync(locVSCODEFolder);
}
catch (_a) {
catch {
console.log('vscode pack is not in ADS yet: ' + langId);
continue;
}
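The catch (_a) to bare catch hunks are another emit-level change: with optional catch binding available in the target, no placeholder parameter is needed when the error object is never read. A minimal sketch mirroring the statSync probes above (illustrative, not the repo's exact code):

import * as fs from 'fs';

function folderExists(p: string): boolean {
    try {
        fs.statSync(p);
        return true;
    } catch {
        // Optional catch binding: no throwaway (_a) parameter when the error value is unused.
        return false;
    }
}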

View File

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const child_process_1 = require("child_process");
@@ -95,10 +95,6 @@ class BooleanPolicy extends BasePolicy {
}
}
class IntPolicy extends BasePolicy {
constructor(name, category, minimumVersion, description, moduleName, defaultValue) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.defaultValue = defaultValue;
}
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'number') {
@@ -110,6 +106,10 @@ class IntPolicy extends BasePolicy {
}
return new IntPolicy(name, category, minimumVersion, description, moduleName, defaultValue);
}
constructor(name, category, minimumVersion, description, moduleName, defaultValue) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.defaultValue = defaultValue;
}
renderADMXElements() {
return [
`<decimal id="${this.name}" valueName="${this.name}" />`
@@ -139,11 +139,6 @@ class StringPolicy extends BasePolicy {
}
}
class StringEnumPolicy extends BasePolicy {
constructor(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.enum_ = enum_;
this.enumDescriptions = enumDescriptions;
}
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'string') {
@@ -165,6 +160,11 @@ class StringEnumPolicy extends BasePolicy {
}
return new StringEnumPolicy(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions);
}
constructor(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.enum_ = enum_;
this.enumDescriptions = enumDescriptions;
}
renderADMXElements() {
return [
`<enum id="${this.name}" valueName="${this.name}">`,

View File

@@ -13,7 +13,7 @@ const rootDir = path.resolve(__dirname, '..', '..');
function runProcess(command, args = []) {
return new Promise((resolve, reject) => {
const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1));
child.on('exit', err => !err ? resolve() : process.exit(err ?? 1));
child.on('error', reject);
});
}
@@ -22,7 +22,7 @@ async function exists(subdir) {
await fs_1.promises.stat(path.join(rootDir, subdir));
return true;
}
catch (_a) {
catch {
return false;
}
}
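The err ?? 1 form above is the source-level counterpart of the deleted err !== null && err !== void 0 ? err : 1; unlike ||, it only falls back when the value is null or undefined, so a legitimate numeric code of 0 would survive. A two-line sketch:

declare const err: number | null;
// Use the child's exit code when it is a number, otherwise default to 1.
const exitCode = err ?? 1;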

View File

@@ -27,7 +27,6 @@ function writeFile(filePath, contents) {
fs.writeFileSync(filePath, contents);
}
function extractEditor(options) {
var _a;
const ts = require('typescript');
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString());
let compilerOptions;
@@ -49,7 +48,7 @@ function extractEditor(options) {
// Take the extra included .d.ts files from `tsconfig.monaco.json`
options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
// Add extra .d.ts files from `node_modules/@types/`
if (Array.isArray((_a = options.compilerOptions) === null || _a === void 0 ? void 0 : _a.types)) {
if (Array.isArray(options.compilerOptions?.types)) {
options.compilerOptions.types.forEach((type) => {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});

View File

@@ -16,11 +16,11 @@ var ShakeLevel;
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
function toStringShakeLevel(shakeLevel) {
switch (shakeLevel) {
case 0 /* Files */:
case 0 /* ShakeLevel.Files */:
return 'Files (0)';
case 1 /* InnerFile */:
case 1 /* ShakeLevel.InnerFile */:
return 'InnerFile (1)';
case 2 /* ClassMembers */:
case 2 /* ShakeLevel.ClassMembers */:
return 'ClassMembers (2)';
}
}
@@ -223,7 +223,7 @@ var NodeColor;
NodeColor[NodeColor["Black"] = 2] = "Black";
})(NodeColor || (NodeColor = {}));
function getColor(node) {
return node.$$$color || 0 /* White */;
return node.$$$color || 0 /* NodeColor.White */;
}
function setColor(node, color) {
node.$$$color = color;
@@ -237,7 +237,7 @@ function isNeededSourceFile(node) {
function nodeOrParentIsBlack(node) {
while (node) {
const color = getColor(node);
if (color === 2 /* Black */) {
if (color === 2 /* NodeColor.Black */) {
return true;
}
node = node.parent;
@@ -245,7 +245,7 @@ function nodeOrParentIsBlack(node) {
return false;
}
function nodeOrChildIsBlack(node) {
if (getColor(node) === 2 /* Black */) {
if (getColor(node) === 2 /* NodeColor.Black */) {
return true;
}
for (const child of node.getChildren()) {
@@ -309,10 +309,10 @@ function markNodes(ts, languageService, options) {
if (!program) {
throw new Error('Could not get program from language service');
}
if (options.shakeLevel === 0 /* Files */) {
if (options.shakeLevel === 0 /* ShakeLevel.Files */) {
// Mark all source files Black
program.getSourceFiles().forEach((sourceFile) => {
setColor(sourceFile, 2 /* Black */);
setColor(sourceFile, 2 /* NodeColor.Black */);
});
return;
}
@@ -324,7 +324,7 @@ function markNodes(ts, languageService, options) {
sourceFile.forEachChild((node) => {
if (ts.isImportDeclaration(node)) {
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* Black */);
setColor(node, 2 /* NodeColor.Black */);
enqueueImport(node, node.moduleSpecifier.text);
}
return;
@@ -332,7 +332,7 @@ function markNodes(ts, languageService, options) {
if (ts.isExportDeclaration(node)) {
if (!node.exportClause && node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
// export * from "foo";
setColor(node, 2 /* Black */);
setColor(node, 2 /* NodeColor.Black */);
enqueueImport(node, node.moduleSpecifier.text);
}
if (node.exportClause && ts.isNamedExports(node.exportClause)) {
@@ -373,21 +373,21 @@ function markNodes(ts, languageService, options) {
return null;
}
function enqueue_gray(node) {
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* Gray */) {
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* NodeColor.Gray */) {
return;
}
setColor(node, 1 /* Gray */);
setColor(node, 1 /* NodeColor.Gray */);
gray_queue.push(node);
}
function enqueue_black(node) {
const previousColor = getColor(node);
if (previousColor === 2 /* Black */) {
if (previousColor === 2 /* NodeColor.Black */) {
return;
}
if (previousColor === 1 /* Gray */) {
if (previousColor === 1 /* NodeColor.Gray */) {
// remove from gray queue
gray_queue.splice(gray_queue.indexOf(node), 1);
setColor(node, 0 /* White */);
setColor(node, 0 /* NodeColor.White */);
// add to black queue
enqueue_black(node);
// move from one queue to the other
@@ -400,7 +400,7 @@ function markNodes(ts, languageService, options) {
}
const fileName = node.getSourceFile().fileName;
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
setColor(node, 2 /* Black */);
setColor(node, 2 /* NodeColor.Black */);
return;
}
const sourceFile = node.getSourceFile();
@@ -411,9 +411,9 @@ function markNodes(ts, languageService, options) {
if (ts.isSourceFile(node)) {
return;
}
setColor(node, 2 /* Black */);
setColor(node, 2 /* NodeColor.Black */);
black_queue.push(node);
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isPropertyDeclaration(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
if (options.shakeLevel === 2 /* ShakeLevel.ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isPropertyDeclaration(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
if (references) {
for (let i = 0, len = references.length; i < len; i++) {
@@ -476,7 +476,7 @@ function markNodes(ts, languageService, options) {
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
gray_queue.splice(i, 1);
black_queue.push(node);
setColor(node, 2 /* Black */);
setColor(node, 2 /* NodeColor.Black */);
i--;
}
}
@@ -506,7 +506,7 @@ function markNodes(ts, languageService, options) {
// (they can be the declaration of a module import)
continue;
}
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration)) {
if (options.shakeLevel === 2 /* ShakeLevel.ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration)) {
enqueue_black(declaration.name);
for (let j = 0; j < declaration.members.length; j++) {
const member = declaration.members[j];
@@ -556,7 +556,7 @@ function markNodes(ts, languageService, options) {
const aliased = checker.getAliasedSymbol(symbol);
if (aliased.declarations && aliased.declarations.length > 0) {
if (nodeOrParentIsBlack(aliased.declarations[0]) || nodeOrChildIsBlack(aliased.declarations[0])) {
setColor(node, 2 /* Black */);
setColor(node, 2 /* NodeColor.Black */);
}
}
}
@@ -603,7 +603,7 @@ function generateResult(ts, languageService, shakeLevel) {
result += data;
}
function writeMarkedNodes(node) {
if (getColor(node) === 2 /* Black */) {
if (getColor(node) === 2 /* NodeColor.Black */) {
return keep(node);
}
// Always keep certain top-level statements
@@ -619,34 +619,34 @@ function generateResult(ts, languageService, shakeLevel) {
if (ts.isImportDeclaration(node)) {
if (node.importClause && node.importClause.namedBindings) {
if (ts.isNamespaceImport(node.importClause.namedBindings)) {
if (getColor(node.importClause.namedBindings) === 2 /* Black */) {
if (getColor(node.importClause.namedBindings) === 2 /* NodeColor.Black */) {
return keep(node);
}
}
else {
const survivingImports = [];
for (const importNode of node.importClause.namedBindings.elements) {
if (getColor(importNode) === 2 /* Black */) {
if (getColor(importNode) === 2 /* NodeColor.Black */) {
survivingImports.push(importNode.getFullText(sourceFile));
}
}
const leadingTriviaWidth = node.getLeadingTriviaWidth();
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* NodeColor.Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
else {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* NodeColor.Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
}
}
}
else {
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
if (node.importClause && getColor(node.importClause) === 2 /* NodeColor.Black */) {
return keep(node);
}
}
@@ -655,7 +655,7 @@ function generateResult(ts, languageService, shakeLevel) {
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
const survivingExports = [];
for (const exportSpecifier of node.exportClause.elements) {
if (getColor(exportSpecifier) === 2 /* Black */) {
if (getColor(exportSpecifier) === 2 /* NodeColor.Black */) {
survivingExports.push(exportSpecifier.getFullText(sourceFile));
}
}
@@ -666,11 +666,11 @@ function generateResult(ts, languageService, shakeLevel) {
}
}
}
if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
if (shakeLevel === 2 /* ShakeLevel.ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
let toWrite = node.getFullText();
for (let i = node.members.length - 1; i >= 0; i--) {
const member = node.members[i];
if (getColor(member) === 2 /* Black */ || !member.name) {
if (getColor(member) === 2 /* NodeColor.Black */ || !member.name) {
// keep method
continue;
}
@@ -686,7 +686,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
node.forEachChild(writeMarkedNodes);
}
if (getColor(sourceFile) !== 2 /* Black */) {
if (getColor(sourceFile) !== 2 /* NodeColor.Black */) {
if (!nodeOrChildIsBlack(sourceFile)) {
// none of the elements are reachable
if (isNeededSourceFile(sourceFile)) {
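The treeshaking hunks only touch trailing comments: when a const enum member is inlined, newer TypeScript appears to write the fully qualified Enum.Member name next to the numeric value, where older versions wrote the bare member name, so the emitted values are unchanged. A small sketch using a NodeColor enum shaped like the one in the source:

const enum NodeColor {
    White = 0,
    Gray = 1,
    Black = 2
}

function isBlack(color: NodeColor): boolean {
    // Inlined at compile time: newer emit writes `color === 2 /* NodeColor.Black */`,
    // older emit wrote `color === 2 /* Black */`; the runtime value is identical.
    return color === NodeColor.Black;
}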

View File

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.createTypeScriptBuilder = exports.CancellationToken = void 0;

View File

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.create = void 0;

View File

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.Transpiler = void 0;

View File

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.graph = exports.strings = exports.collections = void 0;

View File

@@ -304,7 +304,6 @@ function getElectronVersion() {
}
exports.getElectronVersion = getElectronVersion;
function acquireWebNodePaths() {
var _a;
const root = path.join(__dirname, '..', '..');
const webPackageJSON = path.join(root, '/remote/web', 'package.json');
const webPackages = JSON.parse(fs.readFileSync(webPackageJSON, 'utf8')).dependencies;
@@ -312,7 +311,7 @@ function acquireWebNodePaths() {
for (const key of Object.keys(webPackages)) {
const packageJSON = path.join(root, 'node_modules', key, 'package.json');
const packageData = JSON.parse(fs.readFileSync(packageJSON, 'utf8'));
let entryPoint = typeof packageData.browser === 'string' ? packageData.browser : (_a = packageData.main) !== null && _a !== void 0 ? _a : packageData.main; // {{SQL CARBON EDIT}} Some packages (like Turndown) have objects in this field instead of the entry point, fall back to main in that case
let entryPoint = typeof packageData.browser === 'string' ? packageData.browser : packageData.main ?? packageData.main; // {{SQL CARBON EDIT}} Some packages (like Turndown) have objects in this field instead of the entry point, fall back to main in that case
// On rare cases a package doesn't have an entrypoint so we assume it has a dist folder with a min.js
if (!entryPoint) {
// TODO @lramos15 remove this when jschardet adds an entrypoint so we can warn on all packages w/out entrypoint