Revert build folder update to fix it properly (#22981)

* Revert "Disable publishing crash reports temporarily (#22950)"

This reverts commit 13a791d14e.

* Revert "Compile build folder (#22811)"

This reverts commit 2c07c09d0d.
Author: Cheena Malhotra
Date: 2023-05-05 08:50:23 -07:00 (committed by GitHub)
Parent: 70e756b82d
Commit: 9af7a049e6
33 changed files with 1467 additions and 1482 deletions

@@ -0,0 +1,109 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { DocumentClient } from 'documentdb';
interface Config {
id: string;
frozen: boolean;
}
function createDefaultConfig(quality: string): Config {
return {
id: quality,
frozen: false
};
}
function getConfig(quality: string): Promise<Config> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise<Config>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
});
});
}
function doRelease(commit: string, quality: string): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const query = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.isReleased = true;
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
if (err) { return e(err); }
console.log('Build successfully updated.');
c();
});
});
});
}
return update();
}
async function release(commit: string, quality: string): Promise<void> {
const config = await getConfig(quality);
console.log('Quality config:', config);
if (config.frozen) {
console.log(`Skipping release because quality ${quality} is frozen.`);
return;
}
await doRelease(commit, quality);
}
function env(name: string): string {
const result = process.env[name];
if (!result) {
throw new Error(`Skipping release due to missing env: ${name}`);
}
return result;
}
async function main(): Promise<void> {
const commit = env('BUILD_SOURCEVERSION');
const quality = env('VSCODE_QUALITY');
await release(commit, quality);
}
main().catch(err => {
console.error(err);
process.exit(1);
});

@@ -123,21 +123,20 @@ steps:
displayName: Run core unit tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'), ne(variables['EXTENSIONS_ONLY'], 'true'))
# {{SQL CARBON EDIT}} - Reenable "Run core integration tests" step
# - script: |
# # Figure out the full absolute path of the product we just built
# # including the remote server and configure the integration tests
# # to run with these builds instead of running out of sources.
# set -e
# APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
# APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
# INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
# VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-linux-x64" \
# DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
# displayName: Run core integration tests
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'), ne(variables['EXTENSIONS_ONLY'], 'true'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run core integration tests
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'), ne(variables['EXTENSIONS_ONLY'], 'true'))
# {{SQL CARBON TODO}} Reenable "Run Extension Unit Tests (Continue on Error)" and "Run Extension Unit Tests (Fail on Error)" and "Archive Logs" and "Copy Coverage"
# {{SQL CARBON TODO}} Reenable "Run Extension Unit Tests (Continue on Error)" and "Run Extension Unit Tests (Fail on Error)" and "Archive Logs"
# - script: |
# # Figure out the full absolute path of the product we just built
# # including the remote server and configure the unit tests
@@ -181,11 +180,12 @@ steps:
# continueOnError: true
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
yarn gulp vscode-linux-x64-build-deb
displayName: Build Deb
condition: and(succeeded(), ne(variables['EXTENSIONS_ONLY'], 'true'))
# {{SQL CARBON TODO}} - Reenable
# - script: |
# set -e
# yarn gulp vscode-linux-x64-build-deb
# displayName: Build Deb
# condition: and(succeeded(), ne(variables['EXTENSIONS_ONLY'], 'true'))
- script: |
set -e
@@ -237,29 +237,28 @@ steps:
./build/azure-pipelines/linux/createDrop.sh
displayName: Create Drop
# {{SQL CARBON TODO}} Reenable "Run Extension Unit Tests (Continue on Error)" and "Run Extension Unit Tests (Fail on Error)" and "Archive Logs" and "Copy Coverage" and "Publish test results/crash reports"
# - script: |
# set -e
# shopt -s globstar
# mkdir -p $(Build.ArtifactStagingDirectory)/test-results/coverage
# cp --parents -r $(Build.SourcesDirectory)/extensions/*/coverage/** $(Build.ArtifactStagingDirectory)/test-results/coverage
# displayName: Copy Coverage
# condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
shopt -s globstar
mkdir -p $(Build.ArtifactStagingDirectory)/test-results/coverage
cp --parents -r $(Build.SourcesDirectory)/extensions/*/coverage/** $(Build.ArtifactStagingDirectory)/test-results/coverage
displayName: Copy Coverage
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
# - task: PublishTestResults@2
# displayName: 'Publish Test Results test-results.xml'
# inputs:
# testResultsFiles: '*.xml'
# searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
# continueOnError: true
# condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: '*.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
# - task: PublishBuildArtifacts@1
# displayName: 'Publish Artifact: crash reports'
# inputs:
# PathtoPublish: '$(Build.SourcesDirectory)/.build/crashes'
# ArtifactName: crashes
# condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: crash reports'
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/.build/crashes'
ArtifactName: crashes
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'

@@ -27,16 +27,16 @@ If (-NOT ($Quality -eq "stable")) {
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned "$TarballUploadName.tar.gz" $Version true $TarballPath $CommitId
# Publish DEB
$PlatformDeb = "linux-deb-$Arch"
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"
$DebUploadName = "azuredatastudio-linux-$Version"
# $PlatformDeb = "linux-deb-$Arch"
# $DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
# $DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"
# $DebUploadName = "azuredatastudio-linux-$Version"
If (-NOT ($Quality -eq "stable")) {
$DebUploadName = "$DebUploadName-$Quality"
}
# If (-NOT ($Quality -eq "stable")) {
# $DebUploadName = "$DebUploadName-$Quality"
# }
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package "$DebUploadName.deb" $Version true $DebPath $CommitId
# node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package "$DebUploadName.deb" $Version true $DebPath $CommitId
# Publish RPM
$PlatformRpm = "linux-rpm-$Arch"

@@ -1,8 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.watchApiProposalNamesTask = exports.compileApiProposalNamesTask = exports.watchTask = exports.compileTask = exports.transpileTask = void 0;
const es = require("event-stream");
@@ -52,7 +52,7 @@ function createCompile(src, build, emitError, transpileOnly) {
console.warn('* and re-run the build/watch task *');
console.warn('********************************************************************************************');
}
const compilation = tsb.create(projectPath, overrideOptions, { verbose: false }, err => reporter(err));
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) {
const bom = require('gulp-bom');
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
@@ -204,7 +204,7 @@ function generateApiProposalNames() {
const match = /\r?\n/m.exec(src);
eol = match ? match[0] : os.EOL;
}
catch (_a) {
catch {
eol = os.EOL;
}
const pattern = /vscode\.proposed\.([a-zA-Z]+)\.d\.ts$/;

@@ -60,7 +60,7 @@ function createCompile(src: string, build: boolean, emitError: boolean, transpil
}
const compilation = tsb.create(projectPath, overrideOptions, { verbose: false }, err => reporter(err));
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token?: util.ICancellationToken) {
const bom = require('gulp-bom') as typeof import('gulp-bom');

@@ -77,7 +77,7 @@ module.exports = new (_a = class ApiEventNaming {
if (def.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
return def;
}
else if ((def.type === experimental_utils_1.AST_NODE_TYPES.TSPropertySignature || def.type === experimental_utils_1.AST_NODE_TYPES.Property) && def.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
else if ((def.type === experimental_utils_1.AST_NODE_TYPES.TSPropertySignature || def.type === experimental_utils_1.AST_NODE_TYPES.PropertyDefinition) && def.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier) {
return def.key;
}
return this.getIdent(def.parent);

@@ -88,7 +88,7 @@ export = new class ApiEventNaming implements eslint.Rule.RuleModule {
if (def.type === AST_NODE_TYPES.Identifier) {
return def;
} else if ((def.type === AST_NODE_TYPES.TSPropertySignature || def.type === AST_NODE_TYPES.Property) && def.key.type === AST_NODE_TYPES.Identifier) {
} else if ((def.type === AST_NODE_TYPES.TSPropertySignature || def.type === AST_NODE_TYPES.PropertyDefinition) && def.key.type === AST_NODE_TYPES.Identifier) {
return def.key;
}

@@ -34,14 +34,14 @@ function minifyExtensionResources(input) {
.pipe(jsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
const errors = [];
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
if (errors.length === 0) {
// file parsed OK => just stringify to drop whitespace and comments
f.contents = Buffer.from(JSON.stringify(value));
}
return f;
}))
const errors = [];
const value = jsoncParser.parse(f.contents.toString('utf8'), errors);
if (errors.length === 0) {
// file parsed OK => just stringify to drop whitespace and comments
f.contents = Buffer.from(JSON.stringify(value));
}
return f;
}))
.pipe(jsonFilter.restore);
}
function updateExtensionPackageJSON(input, update) {
@@ -50,10 +50,10 @@ function updateExtensionPackageJSON(input, update) {
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(es.mapSync((f) => {
const data = JSON.parse(f.contents.toString('utf8'));
f.contents = Buffer.from(JSON.stringify(update(data)));
return f;
}))
const data = JSON.parse(f.contents.toString('utf8'));
f.contents = Buffer.from(JSON.stringify(update(data)));
return f;
}))
.pipe(packageJsonFilter.restore);
}
function fromLocal(extensionPath, forWeb) {
@@ -95,11 +95,11 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
// check for a webpack configuration files, then invoke webpack
// and merge its output with the files stream.
const webpackConfigLocations = glob.sync(path.join(extensionPath, '**', webpackConfigFileName), { ignore: ['**/node_modules'] });
@@ -123,20 +123,20 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
this.emit('data', data);
}));
});
});
es.merge(...webpackStreams, es.readArray(files))
@@ -158,16 +158,16 @@ function fromLocalNormal(extensionPath) {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
}
@@ -244,7 +244,6 @@ const excludedExtensions = [
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
'vscode-custom-editor-tests',
'vscode-notebook-tests',
'integration-tests', // {{SQL CARBON EDIT}}
];
// {{SQL CARBON EDIT}}
@@ -325,11 +324,11 @@ function isWebExtension(manifest) {
function packageLocalExtensionsStream(forWeb) {
const localExtensionsDescriptions = (glob.sync('extensions/*/package.json')
.map(manifestPath => {
const absoluteManifestPath = path.join(root, manifestPath);
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
const absoluteManifestPath = path.join(root, manifestPath);
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath, manifestPath: absoluteManifestPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => externalExtensions.indexOf(name) === -1) // {{SQL CARBON EDIT}} Remove external Extensions with separate package
@@ -360,15 +359,15 @@ function packageMarketplaceExtensionsStream(forWeb, galleryServiceUrl) {
];
const marketplaceExtensionsStream = minifyExtensionResources(es.merge(...marketplaceExtensionsDescriptions
.map(extension => {
const input = (galleryServiceUrl ? fromMarketplace(galleryServiceUrl, extension) : fromGithub(extension))
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data) => {
delete data.scripts;
delete data.dependencies;
delete data.devDependencies;
return data;
});
})));
const input = (galleryServiceUrl ? fromMarketplace(galleryServiceUrl, extension) : fromGithub(extension))
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
return updateExtensionPackageJSON(input, (data) => {
delete data.scripts;
delete data.dependencies;
delete data.devDependencies;
return data;
});
})));
return (marketplaceExtensionsStream
.pipe(util2.setExecutableBit(['**/*.sh'])));
}
@@ -413,10 +412,10 @@ exports.scanBuiltinExtensions = scanBuiltinExtensions;
function packageExternalExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => externalExtensions.indexOf(name) >= 0 || exports.vscodeExternalExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path, false)
@@ -434,10 +433,10 @@ exports.cleanRebuildExtensions = cleanRebuildExtensions;
function packageRebuildExtensionsStream() {
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
const builtExtensions = extenalExtensionDescriptions.map(extension => {
return fromLocal(extension.path, false)

@@ -1012,7 +1012,7 @@ function prepareI18nFiles() {
}
exports.prepareI18nFiles = prepareI18nFiles;
function createI18nFile(originalFilePath, messages) {
let result = Object.create(null);
const result = Object.create(null);
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -1035,16 +1035,16 @@ function createI18nFile(originalFilePath, messages) {
exports.createI18nFile = createI18nFile;
exports.i18nPackVersion = '1.0.0'; // {{SQL CARBON EDIT}} Needed in locfunc.
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
const parsePromises = [];
const mainPack = { version: exports.i18nPackVersion, contents: {} };
const extensionsPacks = {};
const errors = [];
let parsePromises = [];
let mainPack = { version: exports.i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
return (0, event_stream_1.through)(function (xlf) {
const project = path.basename(path.dirname(path.dirname(xlf.relative)));
const resource = path.basename(xlf.relative, '.xlf');
const contents = xlf.contents.toString();
let project = path.basename(path.dirname(path.dirname(xlf.relative)));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
log(`Found ${project}: ${resource}`);
const parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {

@@ -77,12 +77,6 @@ const RULES: IRule[] = [
skip: true // -> skip all test files
},
// TODO@bpasero remove me once electron utility process has landed
{
target: '**/vs/workbench/services/extensions/electron-sandbox/nativeLocalProcessExtensionHost.ts',
skip: true
},
// Common: vs/base/common/platform.ts
{
target: '**/{vs,sql}/base/common/platform.ts',

@@ -104,12 +104,11 @@ function hasModifier(modifiers, kind) {
return false;
}
function isStatic(ts, member) {
const modifiers = ts.canHaveModifiers(member) ? ts.getModifiers(member) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.StaticKeyword);
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(ts, declaration) {
const modifiers = ts.canHaveModifiers(declaration) ? ts.getModifiers(declaration) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.DefaultKeyword) && hasModifier(modifiers, ts.SyntaxKind.ExportKeyword);
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
}
function getMassagedTopLevelDeclarationText(ts, sourceFile, declaration, importName, usage, enums) {
let result = getNodeText(sourceFile, declaration);

@@ -115,7 +115,7 @@ function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number
return sourceFile.getFullText().substring(node.pos, node.end);
}
function hasModifier(modifiers: readonly ts.Modifier[] | undefined, kind: ts.SyntaxKind): boolean {
function hasModifier(modifiers: ts.NodeArray<ts.ModifierLike> | undefined, kind: ts.SyntaxKind): boolean {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
const mod = modifiers[i];
@@ -128,13 +128,14 @@ function hasModifier(modifiers: readonly ts.Modifier[] | undefined, kind: ts.Syn
}
function isStatic(ts: typeof import('typescript'), member: ts.ClassElement | ts.TypeElement): boolean {
const modifiers = ts.canHaveModifiers(member) ? ts.getModifiers(member) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.StaticKeyword);
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(ts: typeof import('typescript'), declaration: ts.InterfaceDeclaration | ts.ClassDeclaration): boolean {
const modifiers = ts.canHaveModifiers(declaration) ? ts.getModifiers(declaration) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.DefaultKeyword) && hasModifier(modifiers, ts.SyntaxKind.ExportKeyword);
return (
hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword)
);
}
function getMassagedTopLevelDeclarationText(ts: typeof import('typescript'), sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[], enums: IEnumEntry[]): string {

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const child_process_1 = require("child_process");
@@ -95,6 +95,10 @@ class BooleanPolicy extends BasePolicy {
}
}
class IntPolicy extends BasePolicy {
constructor(name, category, minimumVersion, description, moduleName, defaultValue) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.defaultValue = defaultValue;
}
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'number') {
@@ -106,10 +110,6 @@ class IntPolicy extends BasePolicy {
}
return new IntPolicy(name, category, minimumVersion, description, moduleName, defaultValue);
}
constructor(name, category, minimumVersion, description, moduleName, defaultValue) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.defaultValue = defaultValue;
}
renderADMXElements() {
return [
`<decimal id="${this.name}" valueName="${this.name}" />`
@@ -139,6 +139,11 @@ class StringPolicy extends BasePolicy {
}
}
class StringEnumPolicy extends BasePolicy {
constructor(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.enum_ = enum_;
this.enumDescriptions = enumDescriptions;
}
static from(name, category, minimumVersion, description, moduleName, settingNode) {
const type = getStringProperty(settingNode, 'type');
if (type !== 'string') {
@@ -160,11 +165,6 @@ class StringEnumPolicy extends BasePolicy {
}
return new StringEnumPolicy(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions);
}
constructor(name, category, minimumVersion, description, moduleName, enum_, enumDescriptions) {
super(PolicyType.StringEnum, name, category, minimumVersion, description, moduleName);
this.enum_ = enum_;
this.enumDescriptions = enumDescriptions;
}
renderADMXElements() {
return [
`<enum id="${this.name}" valueName="${this.name}">`,
@@ -185,12 +185,11 @@ class StringEnumPolicy extends BasePolicy {
const IntQ = {
Q: `(number) @value`,
value(matches) {
var _a;
const match = matches[0];
if (!match) {
return undefined;
}
const value = (_a = match.captures.filter((c) => c.name === 'value')[0]) === null || _a === void 0 ? void 0 : _a.node.text;
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
}
@@ -203,16 +202,15 @@ const StringQ = {
(call_expression function: (identifier) @localizeFn arguments: (arguments (string (string_fragment) @nlsKey) (string (string_fragment) @value)) (#eq? @localizeFn localize))
]`,
value(matches) {
var _a, _b;
const match = matches[0];
if (!match) {
return undefined;
}
const value = (_a = match.captures.filter((c) => c.name === 'value')[0]) === null || _a === void 0 ? void 0 : _a.node.text;
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
}
const nlsKey = (_b = match.captures.filter((c) => c.name === 'nlsKey')[0]) === null || _b === void 0 ? void 0 : _b.node.text;
const nlsKey = match.captures.filter(c => c.name === 'nlsKey')[0]?.node.text;
if (nlsKey) {
return { value, nlsKey };
}
@@ -323,7 +321,7 @@ function getPolicies(moduleName, node) {
)
`);
const categories = new Map();
return query.matches(node).map((m) => {
return query.matches(node).map(m => {
const configurationNode = m.captures.filter(c => c.name === 'configuration')[0].node;
const settingNode = m.captures.filter(c => c.name === 'setting')[0].node;
const policyNode = m.captures.filter(c => c.name === 'policy')[0].node;
@@ -463,13 +461,12 @@ async function parsePolicies() {
return policies;
}
async function getTranslations() {
var _a;
const updateUrl = product.updateUrl;
if (!updateUrl) {
console.warn(`Skipping policy localization: No 'updateUrl' found in 'product.json'.`);
return [];
}
const resourceUrlTemplate = (_a = product.extensionsGallery) === null || _a === void 0 ? void 0 : _a.resourceUrlTemplate;
const resourceUrlTemplate = product.extensionsGallery?.resourceUrlTemplate;
if (!resourceUrlTemplate) {
console.warn(`Skipping policy localization: No 'resourceUrlTemplate' found in 'product.json'.`);
return [];

@@ -316,7 +316,7 @@ const IntQ: QType<number> = {
return undefined;
}
const value = match.captures.filter((c: { name: string; }) => c.name === 'value')[0]?.node.text;
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
@@ -339,13 +339,13 @@ const StringQ: QType<string | NlsString> = {
return undefined;
}
const value = match.captures.filter((c: { name: string; }) => c.name === 'value')[0]?.node.text;
const value = match.captures.filter(c => c.name === 'value')[0]?.node.text;
if (!value) {
throw new Error(`Missing required 'value' property.`);
}
const nlsKey = match.captures.filter((c: { name: string; }) => c.name === 'nlsKey')[0]?.node.text;
const nlsKey = match.captures.filter(c => c.name === 'nlsKey')[0]?.node.text;
if (nlsKey) {
return { value, nlsKey };
@@ -487,7 +487,7 @@ function getPolicies(moduleName: string, node: Parser.SyntaxNode): Policy[] {
const categories = new Map<string, Category>();
return query.matches(node).map((m: { captures: any[]; }) => {
return query.matches(node).map(m => {
const configurationNode = m.captures.filter(c => c.name === 'configuration')[0].node;
const settingNode = m.captures.filter(c => c.name === 'setting')[0].node;
const policyNode = m.captures.filter(c => c.name === 'policy')[0].node;

@@ -16,11 +16,11 @@ var ShakeLevel;
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
function toStringShakeLevel(shakeLevel) {
switch (shakeLevel) {
case 0 /* ShakeLevel.Files */:
case 0 /* Files */:
return 'Files (0)';
case 1 /* ShakeLevel.InnerFile */:
case 1 /* InnerFile */:
return 'InnerFile (1)';
case 2 /* ShakeLevel.ClassMembers */:
case 2 /* ClassMembers */:
return 'ClassMembers (2)';
}
}
@@ -223,7 +223,7 @@ var NodeColor;
NodeColor[NodeColor["Black"] = 2] = "Black";
})(NodeColor || (NodeColor = {}));
function getColor(node) {
return node.$$$color || 0 /* NodeColor.White */;
return node.$$$color || 0 /* White */;
}
function setColor(node, color) {
node.$$$color = color;
@@ -237,7 +237,7 @@ function isNeededSourceFile(node) {
function nodeOrParentIsBlack(node) {
while (node) {
const color = getColor(node);
if (color === 2 /* NodeColor.Black */) {
if (color === 2 /* Black */) {
return true;
}
node = node.parent;
@@ -245,7 +245,7 @@ function nodeOrParentIsBlack(node) {
return false;
}
function nodeOrChildIsBlack(node) {
if (getColor(node) === 2 /* NodeColor.Black */) {
if (getColor(node) === 2 /* Black */) {
return true;
}
for (const child of node.getChildren()) {
@@ -309,10 +309,10 @@ function markNodes(ts, languageService, options) {
if (!program) {
throw new Error('Could not get program from language service');
}
if (options.shakeLevel === 0 /* ShakeLevel.Files */) {
if (options.shakeLevel === 0 /* Files */) {
// Mark all source files Black
program.getSourceFiles().forEach((sourceFile) => {
setColor(sourceFile, 2 /* NodeColor.Black */);
setColor(sourceFile, 2 /* Black */);
});
return;
}
@@ -324,7 +324,7 @@ function markNodes(ts, languageService, options) {
sourceFile.forEachChild((node) => {
if (ts.isImportDeclaration(node)) {
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* NodeColor.Black */);
setColor(node, 2 /* Black */);
enqueueImport(node, node.moduleSpecifier.text);
}
return;
@@ -332,7 +332,7 @@ function markNodes(ts, languageService, options) {
if (ts.isExportDeclaration(node)) {
if (!node.exportClause && node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
// export * from "foo";
setColor(node, 2 /* NodeColor.Black */);
setColor(node, 2 /* Black */);
enqueueImport(node, node.moduleSpecifier.text);
}
if (node.exportClause && ts.isNamedExports(node.exportClause)) {
@@ -373,21 +373,21 @@ function markNodes(ts, languageService, options) {
return null;
}
function enqueue_gray(node) {
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* NodeColor.Gray */) {
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* Gray */) {
return;
}
setColor(node, 1 /* NodeColor.Gray */);
setColor(node, 1 /* Gray */);
gray_queue.push(node);
}
function enqueue_black(node) {
const previousColor = getColor(node);
if (previousColor === 2 /* NodeColor.Black */) {
if (previousColor === 2 /* Black */) {
return;
}
if (previousColor === 1 /* NodeColor.Gray */) {
if (previousColor === 1 /* Gray */) {
// remove from gray queue
gray_queue.splice(gray_queue.indexOf(node), 1);
setColor(node, 0 /* NodeColor.White */);
setColor(node, 0 /* White */);
// add to black queue
enqueue_black(node);
// move from one queue to the other
@@ -400,7 +400,7 @@ function markNodes(ts, languageService, options) {
}
const fileName = node.getSourceFile().fileName;
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
setColor(node, 2 /* NodeColor.Black */);
setColor(node, 2 /* Black */);
return;
}
const sourceFile = node.getSourceFile();
@@ -411,9 +411,9 @@ function markNodes(ts, languageService, options) {
if (ts.isSourceFile(node)) {
return;
}
setColor(node, 2 /* NodeColor.Black */);
setColor(node, 2 /* Black */);
black_queue.push(node);
if (options.shakeLevel === 2 /* ShakeLevel.ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isPropertyDeclaration(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isPropertyDeclaration(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
if (references) {
for (let i = 0, len = references.length; i < len; i++) {
@@ -476,7 +476,7 @@ function markNodes(ts, languageService, options) {
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
gray_queue.splice(i, 1);
black_queue.push(node);
setColor(node, 2 /* NodeColor.Black */);
setColor(node, 2 /* Black */);
i--;
}
}
@@ -506,7 +506,7 @@ function markNodes(ts, languageService, options) {
// (they can be the declaration of a module import)
continue;
}
if (options.shakeLevel === 2 /* ShakeLevel.ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration)) {
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration)) && !isLocalCodeExtendingOrInheritingFromDefaultLibSymbol(ts, program, checker, declaration)) {
enqueue_black(declaration.name);
for (let j = 0; j < declaration.members.length; j++) {
const member = declaration.members[j];
@@ -556,7 +556,7 @@ function markNodes(ts, languageService, options) {
const aliased = checker.getAliasedSymbol(symbol);
if (aliased.declarations && aliased.declarations.length > 0) {
if (nodeOrParentIsBlack(aliased.declarations[0]) || nodeOrChildIsBlack(aliased.declarations[0])) {
setColor(node, 2 /* NodeColor.Black */);
setColor(node, 2 /* Black */);
}
}
}
@@ -603,7 +603,7 @@ function generateResult(ts, languageService, shakeLevel) {
result += data;
}
function writeMarkedNodes(node) {
if (getColor(node) === 2 /* NodeColor.Black */) {
if (getColor(node) === 2 /* Black */) {
return keep(node);
}
// Always keep certain top-level statements
@@ -619,34 +619,34 @@ function generateResult(ts, languageService, shakeLevel) {
if (ts.isImportDeclaration(node)) {
if (node.importClause && node.importClause.namedBindings) {
if (ts.isNamespaceImport(node.importClause.namedBindings)) {
if (getColor(node.importClause.namedBindings) === 2 /* NodeColor.Black */) {
if (getColor(node.importClause.namedBindings) === 2 /* Black */) {
return keep(node);
}
}
else {
const survivingImports = [];
for (const importNode of node.importClause.namedBindings.elements) {
if (getColor(importNode) === 2 /* NodeColor.Black */) {
if (getColor(importNode) === 2 /* Black */) {
survivingImports.push(importNode.getFullText(sourceFile));
}
}
const leadingTriviaWidth = node.getLeadingTriviaWidth();
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* NodeColor.Black */) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
else {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* NodeColor.Black */) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
}
}
}
else {
if (node.importClause && getColor(node.importClause) === 2 /* NodeColor.Black */) {
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return keep(node);
}
}
@@ -655,7 +655,7 @@ function generateResult(ts, languageService, shakeLevel) {
if (node.exportClause && node.moduleSpecifier && ts.isNamedExports(node.exportClause)) {
const survivingExports = [];
for (const exportSpecifier of node.exportClause.elements) {
if (getColor(exportSpecifier) === 2 /* NodeColor.Black */) {
if (getColor(exportSpecifier) === 2 /* Black */) {
survivingExports.push(exportSpecifier.getFullText(sourceFile));
}
}
@@ -666,11 +666,11 @@ function generateResult(ts, languageService, shakeLevel) {
}
}
}
if (shakeLevel === 2 /* ShakeLevel.ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
let toWrite = node.getFullText();
for (let i = node.members.length - 1; i >= 0; i--) {
const member = node.members[i];
if (getColor(member) === 2 /* NodeColor.Black */ || !member.name) {
if (getColor(member) === 2 /* Black */ || !member.name) {
// keep method
continue;
}
@@ -686,7 +686,7 @@ function generateResult(ts, languageService, shakeLevel) {
}
node.forEachChild(writeMarkedNodes);
}
if (getColor(sourceFile) !== 2 /* NodeColor.Black */) {
if (getColor(sourceFile) !== 2 /* Black */) {
if (!nodeOrChildIsBlack(sourceFile)) {
// none of the elements are reachable
if (isNeededSourceFile(sourceFile)) {

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.createTypeScriptBuilder = exports.CancellationToken = void 0;

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.create = void 0;

@@ -1,9 +1,8 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var _a;
Object.defineProperty(exports, "__esModule", { value: true });
exports.Transpiler = void 0;
const ts = require("typescript");
@@ -11,21 +10,20 @@ const threads = require("node:worker_threads");
const Vinyl = require("vinyl");
const node_os_1 = require("node:os");
function transpile(tsSrc, options) {
var _a, _b;
const isAmd = /\n(import|export)/m.test(tsSrc);
if (!isAmd && ((_a = options.compilerOptions) === null || _a === void 0 ? void 0 : _a.module) === ts.ModuleKind.AMD) {
if (!isAmd && options.compilerOptions?.module === ts.ModuleKind.AMD) {
// enforce NONE module-system for not-amd cases
options = Object.assign(Object.assign({}, options), { compilerOptions: Object.assign(Object.assign({}, options.compilerOptions), { module: ts.ModuleKind.None }) });
options = { ...options, ...{ compilerOptions: { ...options.compilerOptions, module: ts.ModuleKind.None } } };
}
const out = ts.transpileModule(tsSrc, options);
return {
jsSrc: out.outputText,
diag: (_b = out.diagnostics) !== null && _b !== void 0 ? _b : []
diag: out.diagnostics ?? []
};
}
if (!threads.isMainThread) {
// WORKER
(_a = threads.parentPort) === null || _a === void 0 ? void 0 : _a.addListener('message', (req) => {
threads.parentPort?.addListener('message', (req) => {
const res = {
jsSrcs: [],
diagnostics: []
@@ -44,7 +42,6 @@ class TranspileWorker {
this._worker = new threads.Worker(__filename);
this._durations = [];
this._worker.addListener('message', (res) => {
var _a, _b;
if (!this._pending) {
console.error('RECEIVING data WITHOUT request');
return;
@@ -75,7 +72,7 @@ class TranspileWorker {
if (suffixLen === 5 /* SuffixTypes.Dts */ && _isDefaultEmpty(jsSrc)) {
continue;
}
const outBase = (_b = (_a = options.compilerOptions) === null || _a === void 0 ? void 0 : _a.outDir) !== null && _b !== void 0 ? _b : file.base;
const outBase = options.compilerOptions?.outDir ?? file.base;
const outPath = outFileFn(file.path);
outFiles.push(new Vinyl({
path: outPath,

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.graph = exports.strings = exports.collections = void 0;

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.referenceGeneratedDepsByArch = exports.bundledDeps = exports.recommendedDeps = exports.additionalDeps = void 0;

@@ -1,6 +1,6 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
@@ -17,7 +17,7 @@ const dep_lists_1 = require("./dep-lists");
// If true, we fail the build if there are new dependencies found during that task.
// The reference dependencies, which one has to update when the new dependencies
// are valid, are in dep-lists.ts
// const FAIL_BUILD_FOR_NEW_DEPENDENCIES: boolean = true;
const FAIL_BUILD_FOR_NEW_DEPENDENCIES = true;
function getDependencies(buildDir, applicationName, arch, sysroot) {
// Get the files for which we want to find dependencies.
const nativeModulesPath = path.join(buildDir, 'resources', 'app', 'node_modules.asar.unpacked');
@@ -49,19 +49,18 @@ function getDependencies(buildDir, applicationName, arch, sysroot) {
sortedDependencies = sortedDependencies.filter(dependency => {
return !dep_lists_1.bundledDeps.some(bundledDep => dependency.startsWith(bundledDep));
});
/* {{SQL CARBON EDIT}} - Not needed for SQL
const referenceGeneratedDeps = referenceGeneratedDepsByArch[arch];
const referenceGeneratedDeps = dep_lists_1.referenceGeneratedDepsByArch[arch];
if (JSON.stringify(sortedDependencies) !== JSON.stringify(referenceGeneratedDeps)) {
const failMessage = 'The dependencies list has changed.'
+ '\nOld:\n' + referenceGeneratedDeps.join('\n')
+ '\nNew:\n' + sortedDependencies.join('\n');
if (FAIL_BUILD_FOR_NEW_DEPENDENCIES) {
throw new Error(failMessage);
} else {
}
else {
console.warn(failMessage);
}
}
*/
return sortedDependencies;
}
exports.getDependencies = getDependencies;

@@ -9,7 +9,7 @@ import { spawnSync } from 'child_process';
import { constants, statSync } from 'fs';
import { tmpdir } from 'os';
import path = require('path');
import { additionalDeps, bundledDeps } from './dep-lists';
import { additionalDeps, bundledDeps, referenceGeneratedDepsByArch } from './dep-lists';
import { ArchString } from './types';
// A flag that can easily be toggled.
@@ -19,7 +19,7 @@ import { ArchString } from './types';
// If true, we fail the build if there are new dependencies found during that task.
// The reference dependencies, which one has to update when the new dependencies
// are valid, are in dep-lists.ts
// const FAIL_BUILD_FOR_NEW_DEPENDENCIES: boolean = true;
const FAIL_BUILD_FOR_NEW_DEPENDENCIES: boolean = true;
export function getDependencies(buildDir: string, applicationName: string, arch: ArchString, sysroot: string): string[] {
// Get the files for which we want to find dependencies.
@@ -59,7 +59,6 @@ export function getDependencies(buildDir: string, applicationName: string, arch:
return !bundledDeps.some(bundledDep => dependency.startsWith(bundledDep));
});
/* {{SQL CARBON EDIT}} - Not needed for SQL
const referenceGeneratedDeps = referenceGeneratedDepsByArch[arch];
if (JSON.stringify(sortedDependencies) !== JSON.stringify(referenceGeneratedDeps)) {
const failMessage = 'The dependencies list has changed.'
@@ -71,7 +70,7 @@ export function getDependencies(buildDir: string, applicationName: string, arch:
console.warn(failMessage);
}
}
*/
return sortedDependencies;
}

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.getSysroot = void 0;

@@ -1,7 +1,7 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.sysrootInfo = void 0;

@@ -1,6 +1,6 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });

@@ -16,13 +16,13 @@ const dep_lists_1 = require("./dep-lists");
// If true, we fail the build if there are new dependencies found during that task.
// The reference dependencies, which one has to update when the new dependencies
// are valid, are in dep-lists.ts
// const FAIL_BUILD_FOR_NEW_DEPENDENCIES: boolean = false;
const FAIL_BUILD_FOR_NEW_DEPENDENCIES = false;
function getDependencies(buildDir, applicationName, arch) {
// Get the files for which we want to find dependencies.
const nativeModulesPath = path.join(buildDir, 'resources', 'app', 'node_modules.asar.unpacked');
const findResult = (0, child_process_1.spawnSync)('find', [nativeModulesPath, '-name', '*.node']);
if (findResult.status) {
console.error(`Error finding files for ${arch}:`);
console.error('Error finding files:');
console.error(findResult.stderr.toString());
return [];
}
@@ -48,19 +48,18 @@ function getDependencies(buildDir, applicationName, arch) {
sortedDependencies = sortedDependencies.filter(dependency => {
return !dep_lists_1.bundledDeps.some(bundledDep => dependency.startsWith(bundledDep));
});
/* {{SQL CARBON EDIT}} - Not needed for SQL
const referenceGeneratedDeps = referenceGeneratedDepsByArch[arch];
const referenceGeneratedDeps = dep_lists_1.referenceGeneratedDepsByArch[arch];
if (JSON.stringify(sortedDependencies) !== JSON.stringify(referenceGeneratedDeps)) {
const failMessage = 'The dependencies list has changed. '
+ 'Printing newer dependencies list that one can use to compare against referenceGeneratedDeps:\n'
+ sortedDependencies.join('\n');
if (FAIL_BUILD_FOR_NEW_DEPENDENCIES) {
throw new Error(failMessage);
} else {
}
else {
console.warn(failMessage);
}
}
*/
return sortedDependencies;
}
exports.getDependencies = getDependencies;

@@ -6,7 +6,7 @@
import { spawnSync } from 'child_process';
import { constants, statSync } from 'fs';
import path = require('path');
import { additionalDeps, bundledDeps } from './dep-lists';
import { additionalDeps, bundledDeps, referenceGeneratedDepsByArch } from './dep-lists';
import { ArchString } from './types';
// A flag that can easily be toggled.
@@ -16,14 +16,14 @@ import { ArchString } from './types';
// If true, we fail the build if there are new dependencies found during that task.
// The reference dependencies, which one has to update when the new dependencies
// are valid, are in dep-lists.ts
// const FAIL_BUILD_FOR_NEW_DEPENDENCIES: boolean = false;
const FAIL_BUILD_FOR_NEW_DEPENDENCIES: boolean = false;
export function getDependencies(buildDir: string, applicationName: string, arch: ArchString): string[] {
// Get the files for which we want to find dependencies.
const nativeModulesPath = path.join(buildDir, 'resources', 'app', 'node_modules.asar.unpacked');
const findResult = spawnSync('find', [nativeModulesPath, '-name', '*.node']);
if (findResult.status) {
console.error(`Error finding files for ${arch}:`);
console.error('Error finding files:');
console.error(findResult.stderr.toString());
return [];
}
@@ -57,7 +57,6 @@ export function getDependencies(buildDir: string, applicationName: string, arch:
return !bundledDeps.some(bundledDep => dependency.startsWith(bundledDep));
});
/* {{SQL CARBON EDIT}} - Not needed for SQL
const referenceGeneratedDeps = referenceGeneratedDepsByArch[arch];
if (JSON.stringify(sortedDependencies) !== JSON.stringify(referenceGeneratedDeps)) {
const failMessage = 'The dependencies list has changed. '
@@ -69,7 +68,7 @@ export function getDependencies(buildDir: string, applicationName: string, arch:
console.warn(failMessage);
}
}
*/
return sortedDependencies;
}

@@ -105,12 +105,11 @@ function hasModifier(modifiers, kind) {
return false;
}
function isStatic(member) {
const modifiers = ts.canHaveModifiers(member) ? ts.getModifiers(member) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.StaticKeyword);
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(declaration) {
const modifiers = ts.canHaveModifiers(declaration) ? ts.getModifiers(declaration) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.DefaultKeyword) && hasModifier(modifiers, ts.SyntaxKind.ExportKeyword);
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
}
function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums) {
let result = getNodeText(sourceFile, declaration);

@@ -115,7 +115,7 @@ function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number
return sourceFile.getFullText().substring(node.pos, node.end);
}
function hasModifier(modifiers: readonly ts.Modifier[] | undefined, kind: ts.SyntaxKind): boolean {
function hasModifier(modifiers: ts.NodeArray<ts.Modifier> | undefined, kind: ts.SyntaxKind): boolean {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
@@ -128,13 +128,14 @@ function hasModifier(modifiers: readonly ts.Modifier[] | undefined, kind: ts.Syn
}
function isStatic(member: ts.ClassElement | ts.TypeElement): boolean {
const modifiers = ts.canHaveModifiers(member) ? ts.getModifiers(member) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.StaticKeyword);
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(declaration: ts.InterfaceDeclaration | ts.ClassDeclaration): boolean {
const modifiers = ts.canHaveModifiers(declaration) ? ts.getModifiers(declaration) : undefined;
return hasModifier(modifiers, ts.SyntaxKind.DefaultKeyword) && hasModifier(modifiers, ts.SyntaxKind.ExportKeyword);
return (
hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword)
);
}
function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[], enums: IEnumEntry[]): string {
@@ -611,7 +612,7 @@ class CacheEntry {
constructor(
public readonly sourceFile: ts.SourceFile,
public readonly mtime: number
) { }
) {}
}
export class DeclarationResolver {

@@ -9,18 +9,20 @@
"@vscode/vsce": "2.16.0",
"@electron/get": "^1.12.4",
"@types/ansi-colors": "^3.2.0",
"@types/azure": "0.9.19",
"@types/byline": "^4.2.32",
"@types/cssnano": "^4.0.0",
"@types/debounce": "^1.0.0",
"@types/debug": "4.1.5",
"@types/documentdb": "^1.10.9",
"@types/documentdb": "^1.10.5",
"@types/eslint": "4.16.1",
"@types/fancy-log": "^1.3.0",
"@types/eslint-visitor-keys": "^1.0.0",
"@types/fancy-log": "^1.3.1",
"@types/fs-extra": "^9.0.12",
"@types/glob": "^7.1.1",
"@types/gulp": "^4.0.5",
"@types/gulp": "^4.0.10",
"@types/gulp-concat": "^0.0.32",
"@types/gulp-filter": "^3.0.32",
"@types/gulp-filter": "^3.0.35",
"@types/gulp-gzip": "^0.0.31",
"@types/gulp-json-editor": "^2.2.31",
"@types/gulp-postcss": "^8.0.0",
@@ -33,47 +35,51 @@
"@types/mocha": "^9.1.1",
"@types/node": "16.x",
"@types/p-limit": "^2.2.0",
"@types/plist": "^3.0.2",
"@types/pump": "^1.0.1",
"@types/request": "^2.47.0",
"@types/rimraf": "^2.0.4",
"@types/through": "^0.0.29",
"@types/through2": "^2.0.36",
"@types/through2": "^2.0.34",
"@types/tmp": "^0.2.1",
"@types/underscore": "^1.8.9",
"@types/webpack": "^4.41.25",
"@types/xml2js": "0.4.11",
"@typescript-eslint/experimental-utils": "^5.10.0",
"@typescript-eslint/experimental-utils": "~2.13.0",
"@typescript-eslint/parser": "^5.10.0",
"@vscode/iconv-lite-umd": "0.7.0",
"applicationinsights": "1.0.8",
"byline": "^5.0.0",
"colors": "^1.4.0",
"commander": "^7.0.0",
"debug": "^4.3.2",
"documentdb": "^1.15.3",
"documentdb": "1.13.0",
"electron-osx-sign": "^0.4.16",
"esbuild": "^0.14.2",
"esbuild": "^0.12.6",
"extract-zip": "^2.0.1",
"fs-extra": "^9.1.0",
"got": "11.8.5",
"gulp-merge-json": "^2.1.1",
"gulp-shell": "^0.8.0",
"iconv-lite-umd": "0.6.8",
"jsonc-parser": "^2.3.0",
"mime": "^1.4.1",
"mkdirp": "^1.0.4",
"node-fetch": "2",
"p-limit": "^3.1.0",
"plist": "^3.0.5",
"rollup": "^1.20.3",
"rollup-plugin-commonjs": "^10.1.0",
"rollup-plugin-node-resolve": "^5.2.0",
"source-map": "0.6.1",
"through2": "^4.0.2",
"tmp": "^0.2.1",
"typescript": "^4.8.0-dev.20220518",
"vsce": "2.8.0",
"vscode-universal-bundler": "^0.0.2"
},
"scripts": {
"compile": "../node_modules/.bin/tsc -p tsconfig.build.json",
"watch": "../node_modules/.bin/tsc -p tsconfig.build.json --watch",
"npmCheckJs": "../node_modules/.bin/tsc --noEmit"
"compile": "tsc -p tsconfig.build.json",
"watch": "tsc -p tsconfig.build.json --watch",
"npmCheckJs": "tsc --noEmit"
},
"optionalDependencies": {
"tree-sitter": "https://github.com/joaomoreno/node-tree-sitter/releases/download/v0.20.0/tree-sitter-0.20.0.tgz",

File diff suppressed because it is too large

View File

@@ -198,7 +198,7 @@
"gulp-remote-retry-src": "^0.8.0",
"gulp-rename": "^1.2.0",
"gulp-replace": "^0.5.4",
"gulp-shell": "^0.8.0",
"gulp-shell": "^0.6.5",
"gulp-sourcemaps": "^3.0.0",
"gulp-svgmin": "^4.1.0",
"gulp-untar": "^0.0.7",

View File

@@ -742,7 +742,21 @@
dependencies:
type-detect "4.0.8"
"@sinonjs/fake-timers@^7.0.4", "@sinonjs/fake-timers@^7.1.0", "@sinonjs/fake-timers@^7.1.2":
"@sinonjs/commons@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-2.0.0.tgz#fd4ca5b063554307e8327b4564bd56d3b73924a3"
integrity sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==
dependencies:
type-detect "4.0.8"
"@sinonjs/fake-timers@^10.0.2":
version "10.0.2"
resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.0.2.tgz#d10549ed1f423d80639c528b6c7f5a1017747d0c"
integrity sha512-SwUDyjWnah1AaNl7kxsa7cfLhlTYoiyhDAIgyh+El30YvXs/o7OLXpYH88Zdhyx9JExKrmHDJ+10bwIcY80Jmw==
dependencies:
"@sinonjs/commons" "^2.0.0"
"@sinonjs/fake-timers@^7.1.2":
version "7.1.2"
resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-7.1.2.tgz#2524eae70c4910edccf99b2f4e6efc5894aff7b5"
integrity sha512-iQADsW4LBMISqZ6Ci1dupJL9pprqwcVFTcOsEmQOEhW+KLCVn/Y4Jrvg2k19fIHCp+iFprriYPTdRcQR8NbUPg==
@@ -1002,11 +1016,16 @@
"@types/sinon" "*"
"@types/sinon@*", "@types/sinon@^10.0.2":
version "10.0.2"
resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.2.tgz#f360d2f189c0fd433d14aeb97b9d705d7e4cc0e4"
integrity sha512-BHn8Bpkapj8Wdfxvh2jWIUoaYB/9/XhsL0oOvBfRagJtKlSl9NWPcFOz2lRukI9szwGxFtYZCTejJSqsGDbdmw==
version "10.0.13"
resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.13.tgz#60a7a87a70d9372d0b7b38cc03e825f46981fb83"
integrity sha512-UVjDqJblVNQYvVNUsj0PuYYw0ELRmgt1Nt5Vk0pT5f16ROGfcKJY8o1HVuMOJOpD727RrGB9EGvoaTQE5tgxZQ==
dependencies:
"@sinonjs/fake-timers" "^7.1.0"
"@types/sinonjs__fake-timers" "*"
"@types/sinonjs__fake-timers@*":
version "8.1.2"
resolved "https://registry.yarnpkg.com/@types/sinonjs__fake-timers/-/sinonjs__fake-timers-8.1.2.tgz#bf2e02a3dbd4aecaf95942ecd99b7402e03fad5e"
integrity sha512-9GcLXF0/v3t80caGs5p2rRfkB+a8VBGLJZVih6CNFkx8IZ994wiKKLSRs9nuFwk1HevWs/1mnUmkApGrSGsShA==
"@types/sizzle@*":
version "2.3.3"
@@ -2040,6 +2059,13 @@ async-settle@^1.0.0:
dependencies:
async-done "^1.2.2"
async@^2.1.5:
version "2.6.4"
resolved "https://registry.yarnpkg.com/async/-/async-2.6.4.tgz#706b7ff6084664cd7eae713f6f965433b5504221"
integrity sha512-mzo5dfJYwAn29PeiJ0zvwTo04zj8HDJj0Mn8TD7sno7q12prdbnasKJHhkm2c1LgrhlJ0teaea8860oxi51mGA==
dependencies:
lodash "^4.17.14"
asynckit@^0.4.0:
version "0.4.0"
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
@@ -2554,14 +2580,6 @@ chalk@^2.0.0, chalk@^2.1.0, chalk@^2.3.0, chalk@^2.4.1, chalk@^2.4.2:
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"
chalk@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4"
integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg==
dependencies:
ansi-styles "^4.1.0"
supports-color "^7.1.0"
chalk@^4.0.0, chalk@^4.1.0:
version "4.1.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
@@ -5414,17 +5432,18 @@ gulp-replace@^0.5.4:
readable-stream "^2.0.1"
replacestream "^4.0.0"
gulp-shell@^0.8.0:
version "0.8.0"
resolved "https://registry.yarnpkg.com/gulp-shell/-/gulp-shell-0.8.0.tgz#0ed4980de1d0c67e5f6cce971d7201fd0be50555"
integrity sha512-wHNCgmqbWkk1c6Gc2dOL5SprcoeujQdeepICwfQRo91DIylTE7a794VEE+leq3cE2YDoiS5ulvRfKVIEMazcTQ==
gulp-shell@^0.6.5:
version "0.6.5"
resolved "https://registry.yarnpkg.com/gulp-shell/-/gulp-shell-0.6.5.tgz#f07b204ad8ad1c2659f7a1b6d76efa16d416a759"
integrity sha512-f3m1WcS0o2B72/PGj1Jbv9zYR9rynBh/EQJv64n01xQUo7j7anols0eww9GG/WtDTzGVQLrupVDYkifRFnj5Zg==
dependencies:
chalk "^3.0.0"
fancy-log "^1.3.3"
lodash.template "^4.5.0"
plugin-error "^1.0.1"
through2 "^3.0.1"
tslib "^1.10.0"
async "^2.1.5"
chalk "^2.3.0"
fancy-log "^1.3.2"
lodash "^4.17.4"
lodash.template "^4.4.0"
plugin-error "^0.1.2"
through2 "^2.0.3"
gulp-sourcemaps@^3.0.0:
version "3.0.0"
@@ -6863,7 +6882,7 @@ lodash.some@^4.2.2:
resolved "https://registry.yarnpkg.com/lodash.some/-/lodash.some-4.6.0.tgz#1bb9f314ef6b8baded13b549169b2a945eb68e4d"
integrity sha512-j7MJE+TuT51q9ggt4fSgVqro163BEFjAt3u97IqU+JA2DkWl80nFTrowzLpZ/BnpN7rrl0JA/593NAdd8p/scQ==
lodash.template@^4.5.0:
lodash.template@^4.4.0:
version "4.5.0"
resolved "https://registry.yarnpkg.com/lodash.template/-/lodash.template-4.5.0.tgz#f976195cf3f347d0d5f52483569fe8031ccce8ab"
integrity sha512-84vYFxIkmidUiFxidA/KjjH9pAycqW+h980j7Fuz5qxRtO9pgB7MDFTdys1N7A5mcucRiDyEq4fusljItR1T/A==
@@ -6883,7 +6902,7 @@ lodash.uniq@^4.5.0:
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
integrity sha512-xfBaXQd9ryd9dlSDvnvI0lvxfLJlYAZzXomUYzLKtUeOQvOP5piqAWuGtrhWeqaXK9hhoM/iyJc5AV+XfsX3HQ==
lodash@^4.17.10, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19:
lodash@^4.17.10, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.19, lodash@^4.17.4:
version "4.17.21"
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c"
integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==
@@ -7480,12 +7499,12 @@ nice-try@^1.0.4:
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
nise@^5.1.0:
version "5.1.0"
resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.0.tgz#713ef3ed138252daef20ec035ab62b7a28be645c"
integrity sha512-W5WlHu+wvo3PaKLsJJkgPup2LrsXCcm7AWwyNZkUnn5rwPkuPBi3Iwk5SQtN0mv+K65k7nKKjwNQ30wg3wLAQQ==
version "5.1.4"
resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.4.tgz#491ce7e7307d4ec546f5a659b2efe94a18b4bbc0"
integrity sha512-8+Ib8rRJ4L0o3kfmyVCL7gzrohyDe0cMFTBa2d364yIrEGMEoetznKJx899YxjybU6bL9SQkYPSBBs1gyYs8Xg==
dependencies:
"@sinonjs/commons" "^1.7.0"
"@sinonjs/fake-timers" "^7.0.4"
"@sinonjs/commons" "^2.0.0"
"@sinonjs/fake-timers" "^10.0.2"
"@sinonjs/text-encoding" "^0.7.1"
just-extend "^4.0.2"
path-to-regexp "^1.7.0"
@@ -10467,7 +10486,7 @@ tsec@0.1.4:
glob "^7.1.1"
minimatch "^3.0.3"
tslib@^1.10.0, tslib@^1.8.1, tslib@^1.9.0:
tslib@^1.8.1, tslib@^1.9.0:
version "1.14.1"
resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==