Merge from vscode 2c306f762bf9c3db82dc06c7afaa56ef46d72f79 (#14050)

* Merge from vscode 2c306f762bf9c3db82dc06c7afaa56ef46d72f79

* Fix breaks

* Extension management fixes

* Fix breaks in windows bundling

* Fix/skip failing tests

* Update distro

* Add clear to nuget.config

* Add hygiene task

* Bump distro

* Fix hygiene issue

* Add build to hygiene exclusion

* Update distro

* Update hygiene

* Hygiene exclusions

* Update tsconfig

* Bump distro for server breaks

* Update build config

* Update darwin path

* Add done calls to notebook tests

* Skip failing tests

* Disable smoke tests

Author: Karl Burtram
Committed by: GitHub
Date: 2021-02-09 16:15:05 -08:00
Parent commit: 6f192f9af5
Commit: ce612a3d96
1929 changed files with 68012 additions and 34564 deletions

View File

@@ -70,8 +70,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
@@ -96,8 +95,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
const contents = Buffer.concat(out);
out.length = 0;
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: destFilename,
contents: contents
}));

View File

@@ -87,8 +87,7 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: path.join(destFilename + '.unpacked', relative),
stat: file.stat,
contents: file.contents
@@ -117,8 +116,7 @@ export function createAsar(folderPath: string, unpackGlobs: string[], destFilena
out.length = 0;
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
base: '.',
path: destFilename,
contents: contents
}));
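
For context on the base: '.' changes in the two asar hunks above (and the similar base changes in the loader/concat streams later in this diff): a Vinyl file's relative property is computed as path.relative(base, path), so the chosen base determines the path that downstream pipeline stages such as gulp.dest see. A minimal sketch of the mechanics, with a hypothetical destination path:

import * as VinylFile from 'vinyl';

// With base '.' and a destination-relative path, `relative` is simply the
// destination path itself, independent of the source folder on disk.
const unpacked = new VinylFile({
	base: '.',
	path: 'app.asar.unpacked/main.js', // hypothetical destination path
	contents: Buffer.from('// ...')
});
console.log(unpacked.relative); // app.asar.unpacked/main.js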

View File

@@ -21,7 +21,7 @@ function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions,
iconFile: icon
};

View File

@@ -25,7 +25,7 @@ function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
ostypes: ['TEXT', 'utxt', 'TUTX', '****'],
extensions: extensions,
iconFile: icon
};

View File

@@ -37,7 +37,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
// extract key so that it can be checked later
let key;
if (isStringLiteral(keyNode)) {
doubleQuotedStringLiterals.delete(keyNode); //todo@joh reconsider
doubleQuotedStringLiterals.delete(keyNode);
key = keyNode.value;
}
else if (keyNode.type === experimental_utils_1.AST_NODE_TYPES.ObjectExpression) {
@@ -45,7 +45,7 @@ module.exports = new (_a = class NoUnexternalizedStrings {
if (property.type === experimental_utils_1.AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === experimental_utils_1.AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
doubleQuotedStringLiterals.delete(property.value); //todo@joh reconsider
doubleQuotedStringLiterals.delete(property.value);
key = property.value.value;
break;
}

View File

@@ -47,7 +47,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
// extract key so that it can be checked later
let key: string | undefined;
if (isStringLiteral(keyNode)) {
doubleQuotedStringLiterals.delete(keyNode); //todo@joh reconsider
doubleQuotedStringLiterals.delete(keyNode);
key = keyNode.value;
} else if (keyNode.type === AST_NODE_TYPES.ObjectExpression) {
@@ -55,7 +55,7 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
if (property.type === AST_NODE_TYPES.Property && !property.computed) {
if (property.key.type === AST_NODE_TYPES.Identifier && property.key.name === 'key') {
if (isStringLiteral(property.value)) {
doubleQuotedStringLiterals.delete(property.value); //todo@joh reconsider
doubleQuotedStringLiterals.delete(property.value);
key = property.value.value;
break;
}
@@ -123,4 +123,3 @@ export = new class NoUnexternalizedStrings implements eslint.Rule.RuleModule {
};
}
};

View File

@@ -199,7 +199,7 @@ const excludedExtensions = [
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
'vscode-notebook-tests',
'integration-tests',
'integration-tests', // {{SQL CARBON EDIT}}
];
// {{SQL CARBON EDIT}}
const externalExtensions = [

View File

@@ -1038,7 +1038,7 @@ function createI18nFile(originalFilePath, messages) {
contents: Buffer.from(content, 'utf8')
});
}
const i18nPackVersion = "1.0.0";
const i18nPackVersion = '1.0.0';
function pullI18nPackFiles(apiHostname, username, password, language, resultingTranslationPaths) {
return pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language, exports.externalExtensionsWithTranslations)
.pipe(prepareI18nPackFiles(exports.externalExtensionsWithTranslations, resultingTranslationPaths, language.id === 'ps'));

View File

@@ -62,6 +62,10 @@
"name": "vs/workbench/contrib/debug",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/dialogs",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/emmet",
"project": "vscode-workbench"
@@ -210,6 +214,10 @@
"name": "vs/workbench/contrib/webviewPanel",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/workspaces",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/customEditor",
"project": "vscode-workbench"
@@ -361,6 +369,14 @@
{
"name": "vs/workbench/services/authentication",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/extensionRecommendations",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/gettingStarted",
"project": "vscode-workbench"
}
]
}

View File

@@ -1196,7 +1196,7 @@ interface I18nPack {
};
}
const i18nPackVersion = "1.0.0";
const i18nPackVersion = '1.0.0';
export interface TranslationPath {
id: string;

View File

@@ -24,8 +24,8 @@ const minimatch_1 = require("minimatch");
// Feel free to add more core types as you see needed if present in node.js and browsers
const CORE_TYPES = [
'require',
'atob',
'btoa',
// 'atob',
// 'btoa',
'setTimeout',
'clearTimeout',
'setInterval',

View File

@@ -25,8 +25,8 @@ import { match } from 'minimatch';
// Feel free to add more core types as you see needed if present in node.js and browsers
const CORE_TYPES = [
'require', // from our AMD loader
'atob',
'btoa',
// 'atob',
// 'btoa',
'setTimeout',
'clearTimeout',
'setInterval',

build/lib/monaco-api.js (new file, 627 lines)
View File

@@ -0,0 +1,627 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.execute = exports.run3 = exports.DeclarationResolver = exports.FSProvider = exports.RECIPE_PATH = void 0;
const fs = require("fs");
const ts = require("typescript");
const path = require("path");
const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const dtsv = '3';
const tsfmt = require('../../tsfmt.json');
const SRC = path.join(__dirname, '../../src');
exports.RECIPE_PATH = path.join(__dirname, '../monaco/monaco.d.ts.recipe');
const DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
function logErr(message, ...rest) {
fancyLog(ansiColors.yellow(`[monaco.d.ts]`), message, ...rest);
}
function isDeclaration(a) {
return (a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
|| a.kind === ts.SyntaxKind.ClassDeclaration
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|| a.kind === ts.SyntaxKind.ModuleDeclaration);
}
function visitTopLevelDeclarations(sourceFile, visitor) {
let stop = false;
let visit = (node) => {
if (stop) {
return;
}
switch (node.kind) {
case ts.SyntaxKind.InterfaceDeclaration:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.VariableStatement:
case ts.SyntaxKind.TypeAliasDeclaration:
case ts.SyntaxKind.FunctionDeclaration:
case ts.SyntaxKind.ModuleDeclaration:
stop = visitor(node);
}
if (stop) {
return;
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
}
function getAllTopLevelDeclarations(sourceFile) {
let all = [];
visitTopLevelDeclarations(sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
let nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
}
return false /*continue*/;
});
return all;
}
function getTopLevelDeclaration(sourceFile, typeName) {
let result = null;
visitTopLevelDeclarations(sourceFile, (node) => {
if (isDeclaration(node) && node.name) {
if (node.name.text === typeName) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
}
// node is ts.VariableStatement
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
});
return result;
}
function getNodeText(sourceFile, node) {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function hasModifier(modifiers, kind) {
if (modifiers) {
for (let i = 0; i < modifiers.length; i++) {
let mod = modifiers[i];
if (mod.kind === kind) {
return true;
}
}
}
return false;
}
function isStatic(member) {
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
}
function isDefaultExport(declaration) {
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
}
function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums) {
let result = getNodeText(sourceFile, declaration);
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = declaration;
const staticTypeName = (isDefaultExport(interfaceDeclaration)
? `${importName}.default`
: `${importName}.${declaration.name.text}`);
let instanceTypeName = staticTypeName;
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
if (typeParametersCnt > 0) {
let arr = [];
for (let i = 0; i < typeParametersCnt; i++) {
arr.push('any');
}
instanceTypeName = `${instanceTypeName}<${arr.join(',')}>`;
}
const members = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
result = result.replace(memberText, '');
}
else {
const memberName = member.name.text;
const memberAccess = (memberName.indexOf('.') >= 0 ? `['${memberName}']` : `.${memberName}`);
if (isStatic(member)) {
usage.push(`a = ${staticTypeName}${memberAccess};`);
}
else {
usage.push(`a = (<${instanceTypeName}>b)${memberAccess};`);
}
}
}
catch (err) {
// life..
}
});
}
else if (declaration.kind === ts.SyntaxKind.VariableStatement) {
const jsDoc = result.substr(0, declaration.getLeadingTriviaWidth(sourceFile));
if (jsDoc.indexOf('@monacodtsreplace') >= 0) {
const jsDocLines = jsDoc.split(/\r\n|\r|\n/);
let directives = [];
for (const jsDocLine of jsDocLines) {
const m = jsDocLine.match(/^\s*\* \/([^/]+)\/([^/]+)\/$/);
if (m) {
directives.push([new RegExp(m[1], 'g'), m[2]]);
}
}
// remove the jsdoc
result = result.substr(jsDoc.length);
if (directives.length > 0) {
// apply replace directives
const replacer = createReplacerFromDirectives(directives);
result = replacer(result);
}
}
}
result = result.replace(/export default /g, 'export ');
result = result.replace(/export declare /g, 'export ');
result = result.replace(/declare /g, '');
let lines = result.split(/\r\n|\r|\n/);
for (let i = 0; i < lines.length; i++) {
if (/\s*\*/.test(lines[i])) {
// very likely a comment
continue;
}
lines[i] = lines[i].replace(/"/g, '\'');
}
result = lines.join('\n');
if (declaration.kind === ts.SyntaxKind.EnumDeclaration) {
result = result.replace(/const enum/, 'enum');
enums.push({
enumName: declaration.name.getText(sourceFile),
text: result
});
}
return result;
}
function format(text, endl) {
const REALLY_FORMAT = false;
text = preformat(text, endl);
if (!REALLY_FORMAT) {
return text;
}
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function countParensCurly(text) {
let cnt = 0;
for (let i = 0; i < text.length; i++) {
if (text.charAt(i) === '(' || text.charAt(i) === '{') {
cnt++;
}
if (text.charAt(i) === ')' || text.charAt(i) === '}') {
cnt--;
}
}
return cnt;
}
function repeatStr(s, cnt) {
let r = '';
for (let i = 0; i < cnt; i++) {
r += s;
}
return r;
}
function preformat(text, endl) {
let lines = text.split(endl);
let inComment = false;
let inCommentDeltaIndent = 0;
let indent = 0;
for (let i = 0; i < lines.length; i++) {
let line = lines[i].replace(/\s$/, '');
let repeat = false;
let lineIndent = 0;
do {
repeat = false;
if (line.substring(0, 4) === ' ') {
line = line.substring(4);
lineIndent++;
repeat = true;
}
if (line.charAt(0) === '\t') {
line = line.substring(1);
lineIndent++;
repeat = true;
}
} while (repeat);
if (line.length === 0) {
continue;
}
if (inComment) {
if (/\*\//.test(line)) {
inComment = false;
}
lines[i] = repeatStr('\t', lineIndent + inCommentDeltaIndent) + line;
continue;
}
if (/\/\*/.test(line)) {
inComment = true;
inCommentDeltaIndent = indent - lineIndent;
lines[i] = repeatStr('\t', indent) + line;
continue;
}
const cnt = countParensCurly(line);
let shouldUnindentAfter = false;
let shouldUnindentBefore = false;
if (cnt < 0) {
if (/[({]/.test(line)) {
shouldUnindentAfter = true;
}
else {
shouldUnindentBefore = true;
}
}
else if (cnt === 0) {
shouldUnindentBefore = /^\}/.test(line);
}
let shouldIndentAfter = false;
if (cnt > 0) {
shouldIndentAfter = true;
}
else if (cnt === 0) {
shouldIndentAfter = /{$/.test(line);
}
if (shouldUnindentBefore) {
indent--;
}
lines[i] = repeatStr('\t', indent) + line;
if (shouldUnindentAfter) {
indent--;
}
if (shouldIndentAfter) {
indent++;
}
}
return lines.join(endl);
}
function getRuleProvider(options) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
return ts.formatting.getFormatContext(options);
}
function applyEdits(text, edits) {
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
}
}
function createReplacerFromDirectives(directives) {
return (str) => {
for (let i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
return str;
};
}
function createReplacer(data) {
data = data || '';
let rawDirectives = data.split(';');
let directives = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return createReplacerFromDirectives(directives);
}
function generateDeclarationFile(recipe, sourceFileGetter) {
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
let lines = recipe.split(endl);
let result = [];
let usageCounter = 0;
let usageImports = [];
let usage = [];
let failed = false;
usage.push(`var a: any;`);
usage.push(`var b: any;`);
const generateUsageImport = (moduleId) => {
let importName = 'm' + (++usageCounter);
usageImports.push(`import * as ${importName} from './${moduleId.replace(/\.d\.ts$/, '')}';`);
return importName;
};
let enums = [];
let version = null;
lines.forEach(line => {
if (failed) {
return;
}
let m0 = line.match(/^\/\/dtsv=(\d+)$/);
if (m0) {
version = m0[1];
}
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${moduleId}`);
failed = true;
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(sourceFile, typeName);
if (!declaration) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${typeName}`);
failed = true;
return;
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums)));
});
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
const sourceFile = sourceFileGetter(moduleId);
if (!sourceFile) {
logErr(`While handling ${line}`);
logErr(`Cannot find ${moduleId}`);
failed = true;
return;
}
const importName = generateUsageImport(moduleId);
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap = {};
let typesToExcludeArr = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
typesToExcludeMap[typeName] = true;
typesToExcludeArr.push(typeName);
});
getAllTopLevelDeclarations(sourceFile).forEach((declaration) => {
if (isDeclaration(declaration) && declaration.name) {
if (typesToExcludeMap[declaration.name.text]) {
return;
}
}
else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
}
}
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage, enums)));
});
return;
}
result.push(line);
});
if (failed) {
return null;
}
if (version !== dtsv) {
if (!version) {
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' is written before versioning was introduced.`);
}
else {
logErr(`gulp watch restart required. 'monaco.d.ts.recipe' v${version} does not match runtime v${dtsv}.`);
}
return null;
}
let resultTxt = result.join(endl);
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
resultTxt = format(resultTxt, endl);
resultTxt = resultTxt.split(/\r\n|\n|\r/).join(endl);
enums.sort((e1, e2) => {
if (e1.enumName < e2.enumName) {
return -1;
}
if (e1.enumName > e2.enumName) {
return 1;
}
return 0;
});
let resultEnums = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/',
'',
'// THIS IS A GENERATED FILE. DO NOT EDIT DIRECTLY.',
''
].concat(enums.map(e => e.text)).join(endl);
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
resultEnums = format(resultEnums, endl);
resultEnums = resultEnums.split(/\r\n|\n|\r/).join(endl);
return {
result: resultTxt,
usageContent: `${usageImports.join('\n')}\n\n${usage.join('\n')}`,
enums: resultEnums
};
}
function _run(sourceFileGetter) {
const recipe = fs.readFileSync(exports.RECIPE_PATH).toString();
const t = generateDeclarationFile(recipe, sourceFileGetter);
if (!t) {
return null;
}
const result = t.result;
const usageContent = t.usageContent;
const enums = t.enums;
const currentContent = fs.readFileSync(DECLARATION_PATH).toString();
const one = currentContent.replace(/\r\n/gm, '\n');
const other = result.replace(/\r\n/gm, '\n');
const isTheSame = (one === other);
return {
content: result,
usageContent: usageContent,
enums: enums,
filePath: DECLARATION_PATH,
isTheSame
};
}
class FSProvider {
existsSync(filePath) {
return fs.existsSync(filePath);
}
statSync(filePath) {
return fs.statSync(filePath);
}
readFileSync(_moduleId, filePath) {
return fs.readFileSync(filePath);
}
}
exports.FSProvider = FSProvider;
class CacheEntry {
constructor(sourceFile, mtime) {
this.sourceFile = sourceFile;
this.mtime = mtime;
}
}
class DeclarationResolver {
constructor(_fsProvider) {
this._fsProvider = _fsProvider;
this._sourceFileCache = Object.create(null);
}
invalidateCache(moduleId) {
this._sourceFileCache[moduleId] = null;
}
getDeclarationSourceFile(moduleId) {
if (this._sourceFileCache[moduleId]) {
// Since we cannot trust file watching to invalidate the cache, check also the mtime
const fileName = this._getFileName(moduleId);
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
if (this._sourceFileCache[moduleId].mtime !== mtime) {
this._sourceFileCache[moduleId] = null;
}
}
if (!this._sourceFileCache[moduleId]) {
this._sourceFileCache[moduleId] = this._getDeclarationSourceFile(moduleId);
}
return this._sourceFileCache[moduleId] ? this._sourceFileCache[moduleId].sourceFile : null;
}
_getFileName(moduleId) {
if (/\.d\.ts$/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(SRC, `${moduleId}.ts`);
}
_getDeclarationSourceFile(moduleId) {
const fileName = this._getFileName(moduleId);
if (!this._fsProvider.existsSync(fileName)) {
return null;
}
const mtime = this._fsProvider.statSync(fileName).mtime.getTime();
if (/\.d\.ts$/.test(moduleId)) {
// const mtime = this._fsProvider.statFileSync()
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
return new CacheEntry(ts.createSourceFile(fileName, fileContents, ts.ScriptTarget.ES5), mtime);
}
const fileContents = this._fsProvider.readFileSync(moduleId, fileName).toString();
const fileMap = {
'file.ts': fileContents
};
const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
return new CacheEntry(ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5), mtime);
}
}
exports.DeclarationResolver = DeclarationResolver;
function run3(resolver) {
const sourceFileGetter = (moduleId) => resolver.getDeclarationSourceFile(moduleId);
return _run(sourceFileGetter);
}
exports.run3 = run3;
class TypeScriptLanguageServiceHost {
constructor(libs, files, compilerOptions) {
this._libs = libs;
this._files = files;
this._compilerOptions = compilerOptions;
}
// --- language service host ---------------
getCompilationSettings() {
return this._compilerOptions;
}
getScriptFileNames() {
return ([]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files)));
}
getScriptVersion(_fileName) {
return '1';
}
getProjectVersion() {
return '1';
}
getScriptSnapshot(fileName) {
if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]);
}
else if (this._libs.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
}
else {
return ts.ScriptSnapshot.fromString('');
}
}
getScriptKind(_fileName) {
return ts.ScriptKind.TS;
}
getCurrentDirectory() {
return '';
}
getDefaultLibFileName(_options) {
return 'defaultLib:es5';
}
isDefaultLibFileName(fileName) {
return fileName === this.getDefaultLibFileName(this._compilerOptions);
}
}
function execute() {
let r = run3(new DeclarationResolver(new FSProvider()));
if (!r) {
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
exports.execute = execute;
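
The new file above is the compiled JavaScript of the monaco.d.ts generator; it exports RECIPE_PATH, FSProvider, DeclarationResolver, run3 and execute. A minimal sketch of driving the generator from a sibling build script, assuming a relative import; the write-back step is illustrative:

import * as fs from 'fs';
import { execute } from './monaco-api'; // hypothetical relative import

// Regenerate the monaco.d.ts content from the recipe; execute() throws if the
// recipe cannot be resolved, and reports whether the on-disk file already matches.
const result = execute();
if (!result.isTheSame) {
	fs.writeFileSync(result.filePath, result.content);
}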

View File

@@ -61,7 +61,7 @@ function loader(src, bundledFileHeader, bundleLoader) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -92,7 +92,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper
}
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : '';
const base = source.path ? root + `/${src}` : '.';
const path = source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake';
const contents = source.path ? fileContentMapper(source.contents, path) : source.contents;
return new VinylFile({

View File

@@ -69,7 +69,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
base: '.',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -104,7 +104,7 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : '';
const base = source.path ? root + `/${src}` : '.';
const path = source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake';
const contents = source.path ? fileContentMapper(source.contents, path) : source.contents;

View File

@@ -11,65 +11,81 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const fs = require("fs");
const path = require("path");
const allErrors = [];
let startTime = null;
let count = 0;
function onStart() {
if (count++ > 0) {
return;
class ErrorLog {
constructor(id) {
this.id = id;
this.allErrors = [];
this.startTime = null;
this.count = 0;
}
startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}...`);
}
function onEnd() {
if (--count > 0) {
return;
onStart() {
if (this.count++ > 0) {
return;
}
this.startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''}...`);
}
onEnd() {
if (--this.count > 0) {
return;
}
this.log();
}
log() {
const errors = _.flatten(this.allErrors);
const seen = new Set();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
fancyLog(`Finished ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - this.startTime) + ' ms')}`);
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/s;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x)
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
const logFileName = 'log' + (this.id ? `_${this.id}` : '');
fs.writeFileSync(path.join(buildLogFolder, logFileName), JSON.stringify(messages));
}
catch (err) {
//noop
}
}
log();
}
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
const errorLogsById = new Map();
function getErrorLog(id = '') {
let errorLog = errorLogsById.get(id);
if (!errorLog) {
errorLog = new ErrorLog(id);
errorLogsById.set(id, errorLog);
}
return errorLog;
}
const buildLogFolder = path.join(path.dirname(path.dirname(__dirname)), '.build');
try {
fs.mkdirSync(path.dirname(buildLogPath));
fs.mkdirSync(buildLogFolder);
}
catch (err) {
// ignore
}
function log() {
const errors = _.flatten(allErrors);
const seen = new Set();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x)
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
}
catch (err) {
//noop
}
fancyLog(`Finished ${ansiColors.green('compilation')} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - startTime) + ' ms')}`);
}
function createReporter() {
function createReporter(id) {
const errorLog = getErrorLog(id);
const errors = [];
allErrors.push(errors);
errorLog.allErrors.push(errors);
const result = (err) => errors.push(err);
result.hasErrors = () => errors.length > 0;
result.end = (emitError) => {
errors.length = 0;
onStart();
errorLog.onStart();
return es.through(undefined, function () {
onEnd();
errorLog.onEnd();
if (emitError && errors.length > 0) {
if (!errors.__logged__) {
log();
errorLog.log();
}
errors.__logged__ = true;
const err = new Error(`Found ${errors.length} errors`);

View File

@@ -12,72 +12,89 @@ import * as ansiColors from 'ansi-colors';
import * as fs from 'fs';
import * as path from 'path';
const allErrors: string[][] = [];
let startTime: number | null = null;
let count = 0;
class ErrorLog {
constructor(public id: string) {
}
allErrors: string[][] = [];
startTime: number | null = null;
count = 0;
function onStart(): void {
if (count++ > 0) {
return;
onStart(): void {
if (this.count++ > 0) {
return;
}
this.startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''}...`);
}
startTime = new Date().getTime();
fancyLog(`Starting ${ansiColors.green('compilation')}...`);
}
onEnd(): void {
if (--this.count > 0) {
return;
}
function onEnd(): void {
if (--count > 0) {
return;
this.log();
}
log(): void {
const errors = _.flatten(this.allErrors);
const seen = new Set<string>();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
fancyLog(`Finished ${ansiColors.green('compilation')}${this.id ? ansiColors.blue(` ${this.id}`) : ''} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - this.startTime!) + ' ms')}`);
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/s;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x as string[])
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
const logFileName = 'log' + (this.id ? `_${this.id}` : '');
fs.writeFileSync(path.join(buildLogFolder, logFileName), JSON.stringify(messages));
} catch (err) {
//noop
}
}
log();
}
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
const errorLogsById = new Map<string, ErrorLog>();
function getErrorLog(id: string = '') {
let errorLog = errorLogsById.get(id);
if (!errorLog) {
errorLog = new ErrorLog(id);
errorLogsById.set(id, errorLog);
}
return errorLog;
}
const buildLogFolder = path.join(path.dirname(path.dirname(__dirname)), '.build');
try {
fs.mkdirSync(path.dirname(buildLogPath));
fs.mkdirSync(buildLogFolder);
} catch (err) {
// ignore
}
function log(): void {
const errors = _.flatten(allErrors);
const seen = new Set<string>();
errors.map(err => {
if (!seen.has(err)) {
seen.add(err);
fancyLog(`${ansiColors.red('Error')}: ${err}`);
}
});
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x as string[])
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
} catch (err) {
//noop
}
fancyLog(`Finished ${ansiColors.green('compilation')} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - startTime!) + ' ms')}`);
}
export interface IReporter {
(err: string): void;
hasErrors(): boolean;
end(emitError: boolean): NodeJS.ReadWriteStream;
}
export function createReporter(): IReporter {
export function createReporter(id?: string): IReporter {
const errorLog = getErrorLog(id);
const errors: string[] = [];
allErrors.push(errors);
errorLog.allErrors.push(errors);
const result = (err: string) => errors.push(err);
@@ -85,14 +102,14 @@ export function createReporter(): IReporter {
result.end = (emitError: boolean): NodeJS.ReadWriteStream => {
errors.length = 0;
onStart();
errorLog.onStart();
return es.through(undefined, function () {
onEnd();
errorLog.onEnd();
if (emitError && errors.length > 0) {
if (!(errors as any).__logged__) {
log();
errorLog.log();
}
(errors as any).__logged__ = true;

View File

@@ -28,6 +28,7 @@ function writeFile(filePath, contents) {
fs.writeFileSync(filePath, contents);
}
function extractEditor(options) {
var _a;
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.monaco.json')).toString());
let compilerOptions;
if (tsConfig.extends) {
@@ -47,6 +48,12 @@ function extractEditor(options) {
console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
// Take the extra included .d.ts files from `tsconfig.monaco.json`
options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
// Add extra .d.ts files from `node_modules/@types/`
if (Array.isArray((_a = options.compilerOptions) === null || _a === void 0 ? void 0 : _a.types)) {
options.compilerOptions.types.forEach((type) => {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});
}
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {

View File

@@ -55,6 +55,13 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
// Take the extra included .d.ts files from `tsconfig.monaco.json`
options.typings = (<string[]>tsConfig.include).filter(includedFile => /\.d\.ts$/.test(includedFile));
// Add extra .d.ts files from `node_modules/@types/`
if (Array.isArray(options.compilerOptions?.types)) {
options.compilerOptions.types.forEach((type: string) => {
options.typings.push(`../node_modules/@types/${type}/index.d.ts`);
});
}
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
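
The extractEditor change above (shown in both its compiled and TypeScript form) appends an ambient declaration file for every entry in compilerOptions.types of tsconfig.monaco.json. A hedged illustration of the resulting mapping; the 'trusted-types' entry and the initial typings value are only examples:

// If tsconfig.monaco.json declared  "compilerOptions": { "types": ["trusted-types"] },
// the tree-shaker options would gain the matching @types entry point:
const types: string[] = ['trusted-types']; // hypothetical
const typings: string[] = ['typings/require.d.ts']; // hypothetical, from tsconfig.include
types.forEach(type => typings.push(`../node_modules/@types/${type}/index.d.ts`));
// typings now also contains '../node_modules/@types/trusted-types/index.d.ts'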

View File

@@ -4,7 +4,7 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
exports.getElectronVersion = exports.streamToPromise = exports.versionStringToNumber = exports.filter = exports.rebase = exports.getVersion = exports.ensureDir = exports.rreddir = exports.rimraf = exports.rewriteSourceMappingURL = exports.stripSourceMappingURL = exports.loadSourcemaps = exports.cleanNodeModules = exports.skipDirectories = exports.toFileUri = exports.setExecutableBit = exports.fixWin32DirectoryPermissions = exports.incremental = void 0;
const es = require("event-stream");
const debounce = require("debounce");
const _filter = require("gulp-filter");
@@ -167,6 +167,18 @@ function stripSourceMappingURL() {
return es.duplex(input, output);
}
exports.stripSourceMappingURL = stripSourceMappingURL;
function rewriteSourceMappingURL(sourceMappingURLBase) {
const input = es.through();
const output = input
.pipe(es.mapSync(f => {
const contents = f.contents.toString('utf8');
const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`;
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str));
return f;
}));
return es.duplex(input, output);
}
exports.rewriteSourceMappingURL = rewriteSourceMappingURL;
function rimraf(dir) {
const result = () => new Promise((c, e) => {
let retries = 0;

View File

@@ -220,6 +220,20 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
return es.duplex(input, output);
}
export function rewriteSourceMappingURL(sourceMappingURLBase: string): NodeJS.ReadWriteStream {
const input = es.through();
const output = input
.pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8');
const str = `//# sourceMappingURL=${sourceMappingURLBase}/${path.dirname(f.relative).replace(/\\/g, '/')}/$1`;
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, str));
return f;
}));
return es.duplex(input, output);
}
export function rimraf(dir: string): () => Promise<void> {
const result = () => new Promise<void>((c, e) => {
let retries = 0;
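
The new rewriteSourceMappingURL helper introduced above (in both the compiled and TypeScript versions of the util module) rewrites the trailing //# sourceMappingURL= comment of each file in the stream to point at an external base URL, preserving the file's relative directory. A minimal sketch of how it could sit in a gulp pipeline; the glob, destination and URL are hypothetical:

import * as gulp from 'gulp';
import { rewriteSourceMappingURL } from './util'; // hypothetical relative import

// Make minified output reference source maps on an external host instead of the
// sibling .map files next to the JavaScript.
gulp.src('out-vscode-min/**/*.js', { base: 'out-vscode-min' })
	.pipe(rewriteSourceMappingURL('https://example.com/sourcemaps'))
	.pipe(gulp.dest('out-vscode-min'));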