Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-16 18:46:40 -05:00
Merge from vscode bead496a613e475819f89f08e9e882b841bc1fe8 (#14883)
* Merge from vscode bead496a613e475819f89f08e9e882b841bc1fe8
* Bump distro
* Upgrade GCC to 4.9 due to yarn install errors
* Update build image
* Fix bootstrap base url
* Bump distro
* Fix build errors
* Update source map file
* Disable checkbox for blocking migration issues (#15131)
* disable checkbox for blocking issues
* wip
* disable checkbox fixes
* fix strings
* Remove duplicate tsec command
* Default to off for tab color if settings not present
* re-skip failing tests
* Fix mocha error
* Bump sqlite version & fix notebooks search view
* Turn off esbuild warnings
* Update esbuild log level
* Fix overflowactionbar tests
* Fix ts-ignore in dropdown tests
* cleanup/fixes
* Fix hygiene
* Bundle in entire zone.js module
* Remove extra constructor param
* bump distro for web compile break
* bump distro for web compile break v2
* Undo log level change
* New distro
* Fix integration test scripts
* remove the "no yarn.lock changes" workflow
* fix scripts v2
* Update unit test scripts
* Ensure ads-kerberos2 updates in .vscodeignore
* Try fix unit tests
* Upload crash reports
* remove nogpu
* always upload crashes
* Use bash script
* Consolidate data/ext dir names
* Create in tmp directory

Co-authored-by: chlafreniere <hichise@gmail.com>
Co-authored-by: Christopher Suh <chsuh@microsoft.com>
Co-authored-by: chgagnon <chgagnon@microsoft.com>
@@ -3,7 +3,8 @@
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
-const ts = require("typescript");
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.nls = void 0;
 const lazy = require("lazy.js");
 const event_stream_1 = require("event-stream");
 const File = require("vinyl");
@@ -16,7 +17,7 @@ var CollectStepResult;
 CollectStepResult[CollectStepResult["No"] = 2] = "No";
 CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
 })(CollectStepResult || (CollectStepResult = {}));
-function collect(node, fn) {
+function collect(ts, node, fn) {
 const result = [];
 function loop(node) {
 const stepResult = fn(node);
@@ -69,14 +70,16 @@ function nls() {
 if (!typescript) {
 return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
 }
-nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
+_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
 }));
 return event_stream_1.duplex(input, output);
 }
-function isImportNode(node) {
+exports.nls = nls;
+function isImportNode(ts, node) {
 return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
 }
-(function (nls_1) {
+var _nls;
+(function (_nls) {
 function fileFrom(file, contents, path = file.path) {
 return new File({
 contents: Buffer.from(contents),
@@ -85,17 +88,14 @@ function isImportNode(node) {
 path: path
 });
 }
-nls_1.fileFrom = fileFrom;
 function mappedPositionFrom(source, lc) {
 return { source, line: lc.line + 1, column: lc.character };
 }
-nls_1.mappedPositionFrom = mappedPositionFrom;
 function lcFrom(position) {
 return { line: position.line - 1, character: position.column };
 }
-nls_1.lcFrom = lcFrom;
 class SingleFileServiceHost {
-constructor(options, filename, contents) {
+constructor(ts, options, filename, contents) {
 this.options = options;
 this.filename = filename;
 this.getCompilationSettings = () => this.options;
@@ -108,20 +108,19 @@ function isImportNode(node) {
 this.lib = ts.ScriptSnapshot.fromString('');
 }
 }
-nls_1.SingleFileServiceHost = SingleFileServiceHost;
-function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
+function isCallExpressionWithinTextSpanCollectStep(ts, textSpan, node) {
 if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
 return CollectStepResult.No;
 }
 return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
 }
-function analyze(contents, options = {}) {
+function analyze(ts, contents, options = {}) {
 const filename = 'file.ts';
-const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
+const serviceHost = new SingleFileServiceHost(ts, Object.assign(clone(options), { noResolve: true }), filename, contents);
 const service = ts.createLanguageService(serviceHost);
 const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
 // all imports
-const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
+const imports = lazy(collect(ts, sourceFile, n => isImportNode(ts, n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
 // import nls = require('vs/nls');
 const importEqualsDeclarations = imports
 .filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration)
@@ -152,7 +151,7 @@ function isImportNode(node) {
 .flatten()
 .filter(r => !r.isWriteAccess)
 // find the deepest call expressions AST nodes that contain those references
-.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
+.map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n)))
 .map(a => lazy(a).last())
 .filter(n => !!n)
 .map(n => n)
@@ -178,7 +177,7 @@ function isImportNode(node) {
 // find the deepest call expressions AST nodes that contain those references
 const localizeCallExpressions = localizeReferences
 .concat(namedLocalizeReferences)
-.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
+.map(r => collect(ts, sourceFile, n => isCallExpressionWithinTextSpanCollectStep(ts, r.textSpan, n)))
 .map(a => lazy(a).last())
 .filter(n => !!n)
 .map(n => n);
@@ -199,7 +198,6 @@ function isImportNode(node) {
 nlsExpressions: nlsExpressions.toArray()
 };
 }
-nls_1.analyze = analyze;
 class TextModel {
 constructor(contents) {
 const regex = /\r\n|\r|\n/g;
@@ -250,9 +248,8 @@ function isImportNode(node) {
 .flatten().toArray().join('');
 }
 }
-nls_1.TextModel = TextModel;
 function patchJavascript(patches, contents, moduleId) {
-const model = new nls.TextModel(contents);
+const model = new TextModel(contents);
 // patch the localize calls
 lazy(patches).reverse().each(p => model.apply(p));
 // patch the 'vs/nls' imports
@@ -261,7 +258,6 @@ function isImportNode(node) {
 model.set(0, patchedFirstLine);
 return model.toString();
 }
-nls_1.patchJavascript = patchJavascript;
 function patchSourcemap(patches, rsm, smc) {
 const smg = new sm.SourceMapGenerator({
 file: rsm.file,
@@ -297,9 +293,8 @@ function isImportNode(node) {
 }
 return JSON.parse(smg.toString());
 }
-nls_1.patchSourcemap = patchSourcemap;
-function patch(moduleId, typescript, javascript, sourcemap) {
-const { localizeCalls, nlsExpressions } = analyze(typescript);
+function patch(ts, moduleId, typescript, javascript, sourcemap) {
+const { localizeCalls, nlsExpressions } = analyze(ts, typescript);
 if (localizeCalls.length === 0) {
 return { javascript, sourcemap };
 }
@@ -332,13 +327,13 @@ function isImportNode(node) {
 sourcemap = patchSourcemap(patches, sourcemap, smc);
 return { javascript, sourcemap, nlsKeys, nls };
 }
-nls_1.patch = patch;
 function patchFiles(javascriptFile, typescript) {
+const ts = require('typescript');
 // hack?
 const moduleId = javascriptFile.relative
 .replace(/\.js$/, '')
 .replace(/\\/g, '/');
-const { javascript, sourcemap, nlsKeys, nls } = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap);
+const { javascript, sourcemap, nlsKeys, nls } = patch(ts, moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap);
 const result = [fileFrom(javascriptFile, javascript)];
 result[0].sourceMap = sourcemap;
 if (nlsKeys) {
@@ -349,6 +344,5 @@ function isImportNode(node) {
 }
 return result;
 }
-nls_1.patchFiles = patchFiles;
-})(nls || (nls = {}));
-module.exports = nls;
+_nls.patchFiles = patchFiles;
+})(_nls || (_nls = {}));
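The nls.js hunks above all apply one refactoring: drop the module-level `const ts = require("typescript")`, require the compiler lazily inside `patchFiles`, and thread the loaded `ts` object through `collect`, `analyze`, `patch`, and the other helpers as an explicit first parameter, while exporting the stream factory via `exports.nls` and an internal `_nls` namespace instead of `module.exports = nls`. Below is a minimal sketch of that lazy-require / parameter-threading pattern; the helper names (`analyze`, `patchFile`) are illustrative only, not the real build code.

'use strict';

// Sketch only: defer loading the TypeScript compiler until a file is actually
// patched, and pass the loaded module to helpers instead of capturing it at
// module scope.
function analyze(ts, contents) {
    // `ts` arrives as an argument, so merely requiring this file stays cheap.
    const sourceFile = ts.createSourceFile('file.ts', contents, ts.ScriptTarget.ES5, true);
    return sourceFile.statements.length;
}

function patchFile(contents) {
    // typescript is only required when work is actually performed.
    const ts = require('typescript');
    return analyze(ts, contents);
}

module.exports = { patchFile };

The payoff of this shape, and presumably the motivation for the diff, is that build tasks which import the module but never patch any files no longer pay the cost of loading the TypeScript compiler.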