Mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-02-16 10:58:30 -05:00)
Merge VS Code 1.26.1 (#2394)
* Squash merge commits for 1.26 (#1) (#2323) * Polish tag search as per feedback (#55269) * Polish tag search as per feedback * Updated regex * Allow users to opt-out of features that send online requests in the background (#55097) * settings sweep #54690 * Minor css tweaks to enable eoverflow elipsis in more places (#55277) * fix an issue with titlebarheight when not scaling with zoom * Settings descriptions update #54690 * fixes #55209 * Settings editor - many padding fixes * More space above level 2 label * Fixing Cannot debug npm script using Yarn #55103 * Settings editor - show ellipsis when description overflows * Settings editor - ... fix measuring around links, relayout * Setting descriptions * Settings editor - fix ... for some short lines, fix select container width * Settings editor - overlay trees so scrollable shadow is full width * Fix #54133 - missing extension settings after reload * Settings color token description tweak * Settings editor - disable overflow indicator temporarily, needs to be faster * Added command to Run the selected npm script * fixes #54452 * fixes #54929 * fixes #55248 * prefix command with extension name * Contribute run selected to the context menu * node-debug@1.26.6 * Allow terminal rendererType to be swapped out at runtime Part of #53274 Fixes #55344 * Settings editor - fix not focusing search when restoring editor setInput must be actually async. Will be fixed naturally when we aren't using winJS promises... * Settings editor - TOC should only expand the section with a selected item * Bump node-debug2 * Settings editor - Tree focus outlines * Settings editor - don't blink the scrollbar when toc selection changes And hide TOC correctly when the editor is narrow * Settings editor - header rows should not be selectable * fixes #54877 * change debug assignee to isi * Settings sweep (#54690) * workaround for #55051 * Settings sweep (#54690) * settings sweep #54690 * Don't try closing tags when you type > after another > * Describe what implementation code lens does Fixes #55370 * fix javadoc formatter setting description * fixes #55325 * update to officical TS version * Settings editor - Even more padding, use semibold instead of bold * Fix #55357 - fix TOC twistie * fixes #55288 * explorer: refresh on di change file system provider registration fixes #53256 * Disable push to Linux repo to test standalone publisher * New env var to notify log level to extensions #54001 * Disable snippets in extension search (when not in suggest dropdown) (#55281) * Disable snippits in extension search (when not in suggest dropdown) * Add monaco input contributions * Fix bug preventing snippetSuggestions from taking effect in sub-editors * Latest emmet helper to fix #52366 * Fix comment updates for threads within same file * Allow extensions to log telemetry to log files #54001 * Pull latest css grammar * files.exclude control - use same style for "add" vs "edit" * files.exclude control - focus/keyboard behavior * don't show menubar too early * files.exclude - better styling * Place cursor at end of extensions search box on autofill (#55254) * Place cursor at end of extensions search box on autofill * Use position instead of selection * fix linux build issue (empty if block) * Settings editor - fix extension category prefixes * Settings editor - add simple ellipsis for first line that overflows, doesn't cover case when first line does not overflow but there is more text, TODO * File/Text search provider docs * Fixes #52655 * Include epoch (#55008) * Fixes #53385 * Fixes 
#49480 * VS Code Insiders (Users) not opening Fixes #55353 * Better handling of the case when the extension host fails to start * Fixes #53966 * Remove confusing Start from wordPartLeft commands ID * vscode-xterm@3.6.0-beta12 Fixes #55488 * Initial size is set to infinity!! Fixes #55461 * Polish embeddedEditorBackground * configuration service misses event * Fix #55224 - fix duplicate results in multiroot workspace from splitting the diskseach query * Select all not working in issue reporter on mac, fixes #55424 * Disable fuzzy matching for extensions autosuggest (#55498) * Fix clipping of extensions search border in some third party themes (#55504) * fixes #55538 * Fix bug causing an aria alert to not be shown the third time (and odd numbers thereafter) * Settings editor - work around rendering glitch with webkit-line-clamp * Settings editor - revert earlier '...' changes * Settings editor - move enumDescription to its own div, because it disturbs -webkit-line-clamp for some reason * Settings editor - better overflow indicator * Don't show existing filters in autocomplete (#55495) * Dont show existing filters in autocomplete * Simplify * Settings Editor: Add aria labels for input elements Fixes: #54836 (#55543) * fixes #55223 * Update vscode-css-languageservice to 3.0.10-next.1 * Fix #55509 - settings navigation * Fix #55519 * Fix #55520 * FIx #55524 * Fix #55556 - include wordSeparators in all search queries, so findTextInFiles can respect isWordMatch correctly * oss updates for endgame * Fix unit tests * fixes #55522 * Avoid missing manifest error from bubbling up #54757 * Settings format crawl * Search provider - Fix FileSearchProvider to return array, not progress * Fix #55598 * Settings editor - fix NPE rendering settings with no description * dont render inden guides in search box (#55600) * fixes #55454 * More settings crawl * Another change for #55598 - maxResults applies to FileSearch and TextSearch but not FileIndex * Fix FileSearchProvider unit tests for progress change * fixes #55561 * Settings description update for #54690 * Update setting descriptions for online services * Minor edits * fixes #55513 * fixes #55451 * Fix #55612 - fix findTextInFiles cancellation * fixes #55539 * More setting description tweaks * Setting to disable online experiments #54354 * fixes #55507 * fixes #55515 * Show online services action only in Insiders for now * Settings editor - change toc behavior default to 'filter' * Settings editor - nicer filter count style during search * Fix #55617 - search viewlet icons * Settings editor - better styling for element count indicator * SearchProvider - fix NPE when searching extraFileResources * Allow extends to work without json suffix Fixes #16905 * Remove accessability options logic entirely Follow up on #55451 * use latest version of DAP * fixes #55490 * fixes #55122 * fixes #52332 * Avoid assumptions about git: URIs (fixes #36236) * relative path for descriptions * resourece: get rid of isFile context key fixes #48275 * Register previous ids for compatibility (#53497) * more tuning for #48275 * no need to always re-read "files explorer" fixes #52003 * read out active composites properly fixes #51967 * Update link colors for hc theme to meet color contrast ratio, fixes #55651 Also updated link color for `textLinkActiveForeground` to be the same as `textLinkForeground` as it wasn't properly updated * detect 'winpty-agent.exe'; fixes #55672 * node-debug@1.26.7 * reset counter on new label * Settings editor - fix multiple setting links in one description 
* Settings editor - color code blocks in setting descriptions, fix #55532 * Settings editor - hover color in TOC * Settings editor - fix navigation NPE * Settings editor - fix text control width * Settings editor - maybe fix #55684 * Fix bug causing cursor to not move on paste * fixes #53582 * Use ctrlCmd instead of ctrl for go down from search box * fixes #55264 * fixes #55456 * filter for spcaes before triggering search (#55611) * Fix #55698 - don't lose filtered TOC counts when refreshing TOC * fixes #55421 * fixes #28979 * fixes #55576 * only add check for updates to windows/linux help * readonly files: append decoration to label fixes #53022 * debug: do not show toolbar while initialising fixes #55026 * Opening launch.json should not activate debug extensions fixes #55029 * fixes #55435 * fixes #55434 * fixes #55439 * trigger menu only on altkey up * Fix #50555 - fix settings editor memory leak * Fix #55712 - no need to focus 'a' anymore when restoring control focus after tree render * fixes #55335 * proper fix for readonly model fixes #53022 * improve FoldingRangeKind spec (for #55686) * Use class with static fields (fixes #55494) * Fixes #53671 * fixes #54630 * [html] should disable ionic suggestions by default. Currently forces deprecated Ionic v1 suggestions in .html files while typing. Fixes #53324 * cleanup deps * debug issues back to andre * update electron for smoketest * Fix #55757 - prevent settings tabs from overflowing * Fix #53897 - revert setting menu defaults to old editor * Add enum descriptions to `typescript.preferences.importModuleSpecifier` * Fix #55767 - leaking style elements from settings editor * Fix #55521 - prevent flashing when clicking in exclude control * Update Git modified color for contrast ratio, fixes #53140 * Revert "Merge branch 'master' of github.com:Microsoft/vscode" This reverts commit bf46b6bfbae0cab99c2863e1244a916181fa9fbc, reversing changes made to e275a424483dfb4ed33b428c97d5e2c441d6b917. * Revert "Revert "Merge branch 'master' of github.com:Microsoft/vscode"" This reverts commit 53949d963f39e40757557c6526332354a31d9154. 
* don't ask to install an incomplete menu * Fix NPE in terminal AccessibilityManager Fixes #55744 * don't display fallback menu unless we've closed the last window * fixes #55547 * Fix smoke tests for extension search box * Update OSSREADME.json for Electron 2.0.5 * Update distro Includes Chromium license changes * fix #55455 * fix #55865 * fixes #55893 * Fix bug causing workspace recommendations to go away upon ignoring a recommendation (#55805) * Fix bug causing workspace recommendations to go away upon ignoring a recommendation * ONly show on @recommended or @recommended:workspace * Make more consistant * Fix #55911 * Understand json activity (#55926) * Understand json file activity * Refactoring * adding composer.json * Distro update for experiments * use terminal.processId for auto-attach; fixes #55918 * Reject invalid URI with vscode.openFolder (for #55891) * improve win32 setup system vs user detection fixes #55840 fixes #55840 delay winreg import related to #55840 show notification earlier related to #55840 fix #55840 update inno setup message related to #55840 * Fix #55593 - this code only operates on local paths, so use fsPath and Uri.file instead * Bring back the old menu due to electron 2.0 issues (#55913) * add the old menu back for native menus * make menu labels match * `vscode.openFolder`: treat missing URI schema gracefully (for #55891) * delay EH reattach; fixes #55955 * Mark all json files under appSettingsHome as settings * Use localized strings for telemetry opt-out * Exception when saving file editor opened from remote file provider (fixes #55051) * Remove terminal menu from stable Fixes 56003 * VSCode Insiders crashes on open with TypeError: Cannot read property 'lastIndexOf' of undefined. Fixes #54933 * improve fix for #55891 * fix #55916 * Improve #55891 * increase EH debugging restart delay; fixes #55955 * Revert "Don't include non-resource entries in history quick pick" This reverts commit 37209a838e9f7e9abe6dc53ed73cdf1e03b72060. * Diff editor: horizontal scrollbar height is smaller (fixes #56062) * improve openFolder uri fix (correctly treat backslashes) * fixes #56116 repair ipc for native menubar keybindings * Fix #56240 - Open the JSON settings editor instead of the UI editor * Fix #55536 * uriDisplay: if no formatter is registered fall back to getPathlabel fixes #56104 * VSCode hangs when opening python file. Fixes #56377 * VS Code Hangs When Opening Specific PowerShell File. Fixes #56430 * Fix #56433 - search extraFileResources even when no folders open * Workaround #55649 * Fix in master #56371 * Fix tests #56371 * Fix in master #56317 * increase version to 1.26.1 * Fixes #56387: Handle SIGPIPE in extension host * fixes #56185 * Fix merge issues (part 1) * Fix build breaks (part 1) * Build breaks (part 2) * Build breaks (part 3) * More build breaks (part 4) * Fix build breaks (part 5) * WIP * Fix menus * Render query result and message panels (#2363) * Put back query editor hot exit changes * Fix grid changes that broke profiler (#2365) * Update APIs for saving query editor state * Fix restore view state for profiler and edit data * Updating custom default themes to support 4.5:1 contrast ratio * Test updates * Fix Extension Manager and Windows Setup * Update license headers * Add appveyor and travis files back * Fix hidden modal dropdown issue
@@ -1,12 +1,12 @@
[
{
"name": "ms-vscode.node-debug",
"version": "1.23.3",
"version": "1.26.7",
"repo": "https://github.com/Microsoft/vscode-node-debug"
},
{
"name": "ms-vscode.node-debug2",
"version": "1.23.5",
"version": "1.26.8",
"repo": "https://github.com/Microsoft/vscode-node-debug2"
}
]
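For context (not part of the diff itself): these builtInExtensions.json entries are the marketplace extensions bundled into the build. A hypothetical sketch of how such an entry is consumed, using the fromMarketplace helper that appears further down in this diff; the wiring shown here is assumed, not taken from this commit:

    // Sketch only -- fromMarketplace(name, version) is the marketplace download helper shown later in this diff.
    const builtInExtensions = require('./builtInExtensions.json');
    const marketplaceStreams = builtInExtensions.map(ext =>
        fromMarketplace(ext.name, ext.version)); // e.g. ('ms-vscode.node-debug', '1.26.7')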
@@ -12,10 +12,12 @@ const File = require('vinyl');
|
||||
const i18n = require('./lib/i18n');
|
||||
const standalone = require('./lib/standalone');
|
||||
const cp = require('child_process');
|
||||
const compilation = require('./lib/compilation');
|
||||
const monacoapi = require('./monaco/api');
|
||||
const fs = require('fs');
|
||||
|
||||
var root = path.dirname(__dirname);
|
||||
var sha1 = util.getVersion(root);
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
var semver = require('./monaco/package.json').version;
|
||||
var headerVersion = semver + '(' + sha1 + ')';
|
||||
|
||||
@@ -59,29 +61,56 @@ var BUNDLED_FILE_HEADER = [
|
||||
''
|
||||
].join('\n');
|
||||
|
||||
function editorLoaderConfig() {
|
||||
var result = common.loaderConfig();
|
||||
|
||||
// never ship octicons in editor
|
||||
result.paths['vs/base/browser/ui/octiconLabel/octiconLabel'] = 'out-build/vs/base/browser/ui/octiconLabel/octiconLabel.mock';
|
||||
|
||||
// force css inlining to use base64 -- see https://github.com/Microsoft/monaco-editor/issues/148
|
||||
result['vs/css'] = {
|
||||
inlineResources: 'base64',
|
||||
inlineResourcesLimit: 3000 // see https://github.com/Microsoft/monaco-editor/issues/336
|
||||
};
|
||||
|
||||
return result;
|
||||
}
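A minimal sketch (shape assumed, not taken from actual build output) of the loader config object that editorLoaderConfig() returns after the overrides above:

    // Everything here mirrors the function above; any other fields come from common.loaderConfig().
    const editorCfg = {
        paths: {
            // never ship octicons in the standalone editor
            'vs/base/browser/ui/octiconLabel/octiconLabel': 'out-build/vs/base/browser/ui/octiconLabel/octiconLabel.mock'
        },
        'vs/css': {
            inlineResources: 'base64', // monaco-editor#148
            inlineResourcesLimit: 3000 // monaco-editor#336
        }
    };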
|
||||
|
||||
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||
|
||||
gulp.task('clean-editor-src', util.rimraf('out-editor-src'));
|
||||
gulp.task('extract-editor-src', ['clean-editor-src'], function () {
|
||||
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
|
||||
const apiusages = monacoapi.execute().usageContent;
|
||||
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
|
||||
standalone.extractEditor({
|
||||
sourcesRoot: path.join(root, 'src'),
|
||||
entryPoints: [
|
||||
'vs/editor/editor.main',
|
||||
'vs/editor/editor.worker',
|
||||
'vs/base/worker/workerMain',
|
||||
],
|
||||
inlineEntryPoints: [
|
||||
apiusages,
|
||||
extrausages
|
||||
],
|
||||
libs: [
|
||||
`lib.d.ts`,
|
||||
`lib.es2015.collection.d.ts`
|
||||
],
|
||||
redirects: {
|
||||
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
||||
},
|
||||
compilerOptions: {
|
||||
module: 2, // ModuleKind.AMD
|
||||
},
|
||||
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
|
||||
importIgnorePattern: /^vs\/css!/,
|
||||
destRoot: path.join(root, 'out-editor-src')
|
||||
});
|
||||
});
|
||||
|
||||
// Full compile, including nls and inline sources in sourcemaps, for build
|
||||
gulp.task('clean-editor-build', util.rimraf('out-editor-build'));
|
||||
gulp.task('compile-editor-build', ['clean-editor-build', 'extract-editor-src'], compilation.compileTask('out-editor-src', 'out-editor-build', true));
|
||||
|
||||
gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
|
||||
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({
|
||||
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'], common.optimizeTask({
|
||||
src: 'out-editor-build',
|
||||
entryPoints: editorEntryPoints,
|
||||
otherSources: editorOtherSources,
|
||||
resources: editorResources,
|
||||
loaderConfig: editorLoaderConfig(),
|
||||
loaderConfig: {
|
||||
paths: {
|
||||
'vs': 'out-editor-build/vs',
|
||||
'vscode': 'empty:'
|
||||
}
|
||||
},
|
||||
bundleLoader: false,
|
||||
header: BUNDLED_FILE_HEADER,
|
||||
bundleInfo: true,
|
||||
@@ -114,6 +143,41 @@ gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], f
|
||||
console.log(result.stdout.toString());
|
||||
});
|
||||
|
||||
function toExternalDTS(contents) {
|
||||
let lines = contents.split('\n');
|
||||
let killNextCloseCurlyBrace = false;
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i];
|
||||
|
||||
if (killNextCloseCurlyBrace) {
|
||||
if ('}' === line) {
|
||||
lines[i] = '';
|
||||
killNextCloseCurlyBrace = false;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (line.indexOf(' ') === 0) {
|
||||
lines[i] = line.substr(4);
|
||||
} else if (line.charAt(0) === '\t') {
|
||||
lines[i] = line.substr(1);
|
||||
}
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if ('declare namespace monaco {' === line) {
|
||||
lines[i] = '';
|
||||
killNextCloseCurlyBrace = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (line.indexOf('declare namespace monaco.') === 0) {
|
||||
lines[i] = line.replace('declare namespace monaco.', 'export namespace ');
|
||||
}
|
||||
}
|
||||
return lines.join('\n');
|
||||
}
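A small usage sketch for toExternalDTS above (the input lines are invented):

    // Input: monaco.d.ts-style declarations...
    const input = [
        'declare namespace monaco {',
        '    export type Thenable<T> = PromiseLike<T>;',
        '}',
        'declare namespace monaco.editor {',
        '}'
    ].join('\n');
    // ...come out as ESM-friendly declarations: the wrapping "declare namespace monaco"
    // block is unwrapped (its body de-indented by four spaces or one tab) and nested
    // namespaces such as monaco.editor become "export namespace editor".
    const output = toExternalDTS(input);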
|
||||
|
||||
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
|
||||
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () {
|
||||
return es.merge(
|
||||
@@ -130,7 +194,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
|
||||
this.emit('data', new File({
|
||||
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
|
||||
base: data.base,
|
||||
contents: data.contents
|
||||
contents: new Buffer(toExternalDTS(data.contents.toString()))
|
||||
}));
|
||||
}))
|
||||
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
|
||||
@@ -195,7 +259,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
|
||||
});
|
||||
|
||||
gulp.task('analyze-editor-distro', function () {
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
// @ts-ignore
|
||||
var bundleInfo = require('../out-editor/bundleInfo.json');
|
||||
var graph = bundleInfo.graph;
|
||||
var bundles = bundleInfo.bundles;
|
||||
|
||||
@@ -20,7 +20,6 @@ const sourcemaps = require('gulp-sourcemaps');
|
||||
const nlsDev = require('vscode-nls-dev');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
const i18n = require('./lib/i18n');
|
||||
const plumber = require('gulp-plumber');
|
||||
|
||||
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
||||
@@ -32,8 +31,6 @@ const compilations = glob.sync('**/tsconfig.json', {
|
||||
|
||||
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
|
||||
|
||||
const languages = i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||
|
||||
const tasks = compilations.map(function (tsconfigFile) {
|
||||
const absolutePath = path.join(extensionsPath, tsconfigFile);
|
||||
const relativeDirname = path.dirname(tsconfigFile);
|
||||
@@ -58,7 +55,6 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
const srcBase = path.join(root, 'src');
|
||||
const src = path.join(srcBase, '**');
|
||||
const out = path.join(root, 'out');
|
||||
const i18nPath = path.join(__dirname, '..', 'i18n');
|
||||
const baseUrl = getBaseUrl(out);
|
||||
|
||||
let headerId, headerOut;
|
||||
@@ -102,9 +98,9 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
sourceRoot: '../src'
|
||||
}))
|
||||
.pipe(tsFilter.restore)
|
||||
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18nPath, out) : es.through())
|
||||
.pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through())
|
||||
.pipe(build ? nlsDev.bundleLanguageFiles() : es.through())
|
||||
// Filter out *.nls.json file. We needed them only to bundle meta data file.
|
||||
.pipe(filter(['**', '!**/*.nls.json']))
|
||||
.pipe(reporter.end(emitError));
|
||||
|
||||
return es.duplex(input, output);
|
||||
|
||||
@@ -49,6 +49,7 @@ const indentationFilter = [
|
||||
'!src/vs/base/common/marked/marked.js',
|
||||
'!src/vs/base/common/winjs.base.js',
|
||||
'!src/vs/base/node/terminateProcess.sh',
|
||||
'!src/vs/base/node/cpuUsage.sh',
|
||||
'!test/assert.js',
|
||||
|
||||
// except specific folders
|
||||
@@ -81,8 +82,9 @@ const indentationFilter = [
|
||||
'!build/{lib,tslintRules}/**/*.js',
|
||||
'!build/**/*.sh',
|
||||
'!build/tfs/**/*.js',
|
||||
'!build/tfs/**/*.config',
|
||||
'!**/Dockerfile',
|
||||
'!extensions/markdown/media/*.js'
|
||||
'!extensions/markdown-language-features/media/*.js'
|
||||
];
|
||||
|
||||
const copyrightFilter = [
|
||||
@@ -103,8 +105,9 @@ const copyrightFilter = [
|
||||
'!**/*.code-workspace',
|
||||
'!build/**/*.init',
|
||||
'!resources/linux/snap/snapcraft.yaml',
|
||||
'!resources/linux/snap/electron-launch',
|
||||
'!resources/win32/bin/code.js',
|
||||
'!extensions/markdown-language-features/media/tomorrow.css',
|
||||
'!extensions/markdown-language-features/media/highlight.css',
|
||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||
'!extensions/*/server/bin/*'
|
||||
];
|
||||
@@ -136,6 +139,7 @@ const tslintFilter = [
|
||||
'!extensions/html-language-features/server/lib/jquery.d.ts'
|
||||
];
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const copyrightHeaderLines = [
|
||||
'/*---------------------------------------------------------------------------------------------',
|
||||
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||
|
||||
@@ -18,7 +18,6 @@ const assign = require('object-assign');
|
||||
// {{SQL CARBON EDIT}}
|
||||
const jeditor = require('gulp-json-editor');
|
||||
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const pkg = require('../package.json');
|
||||
|
||||
gulp.task('mixin', function () {
|
||||
@@ -52,4 +51,4 @@ gulp.task('mixin', function () {
|
||||
return gulp.src('./product.json')
|
||||
.pipe(jeditor(newValues))
|
||||
.pipe(gulp.dest('.'));
|
||||
});
|
||||
});
|
||||
|
||||
@@ -17,19 +17,15 @@ const vfs = require('vinyl-fs');
|
||||
const rename = require('gulp-rename');
|
||||
const replace = require('gulp-replace');
|
||||
const filter = require('gulp-filter');
|
||||
const buffer = require('gulp-buffer');
|
||||
const json = require('gulp-json-editor');
|
||||
const _ = require('underscore');
|
||||
const util = require('./lib/util');
|
||||
const ext = require('./lib/extensions');
|
||||
const buildfile = require('../src/buildfile');
|
||||
const common = require('./lib/optimize');
|
||||
const nlsDev = require('vscode-nls-dev');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const packageJson = require('../package.json');
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const product = require('../product.json');
|
||||
const crypto = require('crypto');
|
||||
const i18n = require('./lib/i18n');
|
||||
@@ -62,7 +58,6 @@ const nodeModules = [
|
||||
|
||||
|
||||
// Build
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const builtInExtensions = require('./builtInExtensions.json');
|
||||
|
||||
const excludedExtensions = [
|
||||
@@ -98,7 +93,7 @@ const vscodeResources = [
|
||||
'out-build/paths.js',
|
||||
'out-build/vs/**/*.{svg,png,cur,html}',
|
||||
'out-build/vs/base/common/performance.js',
|
||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
|
||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}',
|
||||
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
|
||||
'out-build/vs/workbench/browser/media/*-theme.css',
|
||||
'out-build/vs/workbench/electron-browser/bootstrap/**',
|
||||
@@ -107,7 +102,6 @@ const vscodeResources = [
|
||||
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
|
||||
'out-build/vs/**/markdown.css',
|
||||
'out-build/vs/workbench/parts/tasks/**/*.json',
|
||||
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
|
||||
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
|
||||
'out-build/vs/workbench/services/files/**/*.exe',
|
||||
'out-build/vs/workbench/services/files/**/*.md',
|
||||
@@ -143,17 +137,15 @@ const BUNDLED_FILE_HEADER = [
|
||||
' *--------------------------------------------------------*/'
|
||||
].join('\n');
|
||||
|
||||
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||
|
||||
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
|
||||
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
|
||||
src: 'out-build',
|
||||
entryPoints: vscodeEntryPoints,
|
||||
otherSources: [],
|
||||
resources: vscodeResources,
|
||||
loaderConfig: common.loaderConfig(nodeModules),
|
||||
header: BUNDLED_FILE_HEADER,
|
||||
out: 'out-vscode',
|
||||
languages: languages,
|
||||
bundleInfo: undefined
|
||||
}));
|
||||
|
||||
@@ -170,6 +162,8 @@ gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
|
||||
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
|
||||
|
||||
// Package
|
||||
|
||||
// @ts-ignore JSON checking: darwinCredits is optional
|
||||
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
||||
|
||||
const config = {
|
||||
@@ -199,6 +193,8 @@ const config = {
|
||||
linuxExecutableName: product.applicationName,
|
||||
winIcon: 'resources/win32/code.ico',
|
||||
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
|
||||
|
||||
// @ts-ignore JSON checking: electronRepository is optional
|
||||
repo: product.electronRepository || void 0
|
||||
};
|
||||
|
||||
@@ -315,15 +311,8 @@ function packageTask(platform, arch, opts) {
|
||||
packageBuiltInExtensions();
|
||||
|
||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||
const nlsFilter = filter('**/*.nls.json', { restore: true });
|
||||
|
||||
return ext.fromLocal(extension.path)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`))
|
||||
// // TODO@Dirk: this filter / buffer is here to make sure the nls.json files are buffered
|
||||
.pipe(nlsFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(nlsDev.createAdditionalLanguageFiles(languages, path.join(__dirname, '..', 'i18n')))
|
||||
.pipe(nlsFilter.restore);
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
}));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
@@ -342,6 +331,7 @@ function packageTask(platform, arch, opts) {
|
||||
.pipe(filter(['**', '!**/*.js.map']));
|
||||
|
||||
let version = packageJson.version;
|
||||
// @ts-ignore JSON checking: quality is optional
|
||||
const quality = product.quality;
|
||||
|
||||
if (quality && quality !== 'stable') {
|
||||
@@ -352,10 +342,15 @@ function packageTask(platform, arch, opts) {
|
||||
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
|
||||
.pipe(json({ name, version }));
|
||||
|
||||
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
|
||||
const date = new Date().toISOString();
|
||||
const productJsonUpdate = { commit, date, checksums };
|
||||
|
||||
if (shouldSetupSettingsSearch()) {
|
||||
productJsonUpdate.settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
|
||||
}
|
||||
|
||||
const productJsonStream = gulp.src(['product.json'], { base: '.' })
|
||||
.pipe(json({ commit, date, checksums, settingsSearchBuildId }));
|
||||
.pipe(json(productJsonUpdate));
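For illustration (all values invented): gulp-json-editor merges a plain object into the existing product.json, so a master/release build ends up with an update shaped roughly like this:

    // Sketch of productJsonUpdate as passed to json(...) above.
    const exampleUpdate = {
        commit: '<build commit sha>',
        date: '2018-08-16T12:00:00.000Z',
        checksums: { /* file path -> hash map computed earlier in packageTask */ },
        settingsSearchBuildId: 1260112345671 // only added when shouldSetupSettingsSearch() is true
    };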
|
||||
|
||||
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
|
||||
|
||||
@@ -368,6 +363,7 @@ function packageTask(platform, arch, opts) {
|
||||
|
||||
const depsSrc = [
|
||||
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
// @ts-ignore JSON checking: dependencies is optional
|
||||
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
|
||||
];
|
||||
|
||||
@@ -578,9 +574,8 @@ gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
|
||||
|
||||
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
|
||||
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
|
||||
if (!/\/master$/.test(branch) && branch.indexOf('/release/') < 0) {
|
||||
if (!shouldSetupSettingsSearch()) {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
console.log(`Only runs on master and release branches, not ${branch}`);
|
||||
return;
|
||||
}
|
||||
@@ -603,13 +598,24 @@ gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () =
|
||||
}));
|
||||
});
|
||||
|
||||
function getSettingsSearchBuildId(packageJson) {
|
||||
const previous = util.getPreviousVersion(packageJson.version);
|
||||
function shouldSetupSettingsSearch() {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
|
||||
}
|
||||
|
||||
function getSettingsSearchBuildId(packageJson) {
|
||||
try {
|
||||
const out = cp.execSync(`git rev-list ${previous}..HEAD --count`);
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
|
||||
/\/master$/.test(branch) ? 1 :
|
||||
2; // Some unexpected branch
|
||||
|
||||
const out = cp.execSync(`git rev-list HEAD --count`);
|
||||
const count = parseInt(out.toString());
|
||||
return util.versionStringToNumber(packageJson.version) * 1e4 + count;
|
||||
|
||||
// <version number><commit count><branchId (avoid unlikely conflicts)>
|
||||
// 1.25.1, 1,234,567 commits, master = 1250112345671
|
||||
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
|
||||
} catch (e) {
|
||||
throw new Error('Could not determine build number: ' + e.toString());
|
||||
}
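A worked example of the new build-id encoding, matching the comment above (it assumes util.versionStringToNumber('1.25.1') === 12501, which is what that comment implies):

    // <version number><commit count * 10><branchId>
    //   12501 * 1e8     = 1250100000000
    // + 1234567 * 10    =      12345670
    // + 1  (master)     =             1
    // total             = 1250112345671   <- matches the "1.25.1, 1,234,567 commits, master" example
    const exampleBuildId = 12501 * 1e8 + 1234567 * 10 + 1;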
|
||||
@@ -623,6 +629,10 @@ gulp.task('generate-vscode-configuration', () => {
|
||||
return reject(new Error('$AGENT_BUILDDIRECTORY not set'));
|
||||
}
|
||||
|
||||
if (process.env.VSCODE_QUALITY !== 'insider' && process.env.VSCODE_QUALITY !== 'stable') {
|
||||
return resolve();
|
||||
}
|
||||
|
||||
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
|
||||
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
|
||||
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
|
||||
|
||||
@@ -12,11 +12,8 @@ const shell = require('gulp-shell');
|
||||
const es = require('event-stream');
|
||||
const vfs = require('vinyl-fs');
|
||||
const util = require('./lib/util');
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const packageJson = require('../package.json');
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const product = require('../product.json');
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
|
||||
|
||||
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
|
||||
@@ -76,7 +73,9 @@ function prepareDebPackage(arch) {
|
||||
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@ARCHITECTURE@@', debArch))
|
||||
// @ts-ignore JSON checking: quality is optional
|
||||
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
|
||||
// @ts-ignore JSON checking: updateUrl is optional
|
||||
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
||||
.pipe(rename('DEBIAN/postinst'));
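To make the templating above concrete (the template line is invented; the real postinst.template is not part of this diff), the chain of gulp-replace calls behaves like:

    // Sketch: placeholders are swapped for product values, and missing optional
    // values (quality, updateUrl) fall back to leaving the placeholder in place.
    const line = 'ARCHITECTURE="@@ARCHITECTURE@@" QUALITY="@@QUALITY@@"';
    const rendered = line
        .replace('@@ARCHITECTURE@@', 'amd64') // debArch for an x64 build (assumed mapping)
        .replace('@@QUALITY@@', product.quality || '@@QUALITY@@');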
|
||||
|
||||
@@ -135,7 +134,9 @@ function prepareRpmPackage(arch) {
|
||||
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
|
||||
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
|
||||
.pipe(replace('@@LICENSE@@', product.licenseName))
|
||||
// @ts-ignore JSON checking: quality is optional
|
||||
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
|
||||
// @ts-ignore JSON checking: updateUrl is optional
|
||||
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
||||
.pipe(replace('@@DEPENDENCIES@@', rpmDependencies[rpmArch].join(', ')))
|
||||
.pipe(rename('SPECS/' + product.applicationName + '.spec'));
|
||||
|
||||
@@ -7,45 +7,71 @@
|
||||
|
||||
const gulp = require('gulp');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const assert = require('assert');
|
||||
const cp = require('child_process');
|
||||
const _7z = require('7zip')['7z'];
|
||||
const util = require('./lib/util');
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const pkg = require('../package.json');
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const product = require('../product.json');
|
||||
const vfs = require('vinyl-fs');
|
||||
const mkdirp = require('mkdirp');
|
||||
|
||||
const repoPath = path.dirname(__dirname);
|
||||
// {{SQL CARBON EDIT}}
|
||||
const buildPath = arch => path.join(path.dirname(repoPath), `sqlops-win32-${arch}`);
|
||||
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
|
||||
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
|
||||
const setupDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'setup');
|
||||
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
|
||||
const issPath = path.join(__dirname, 'win32', 'code.iss');
|
||||
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
|
||||
const signPS1 = path.join(repoPath, 'build', 'tfs', 'win32', 'sign.ps1');
|
||||
|
||||
function packageInnoSetup(iss, options, cb) {
|
||||
options = options || {};
|
||||
|
||||
const definitions = options.definitions || {};
|
||||
const debug = process.argv.some(arg => arg === '--debug-inno');
|
||||
|
||||
if (debug) {
|
||||
definitions['Debug'] = 'true';
|
||||
}
|
||||
|
||||
const keys = Object.keys(definitions);
|
||||
|
||||
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
|
||||
|
||||
const defs = keys.map(key => `/d${key}=${definitions[key]}`);
|
||||
const args = [iss].concat(defs);
|
||||
const args = [
|
||||
iss,
|
||||
...defs
|
||||
//,
|
||||
//`/sesrp=powershell.exe -ExecutionPolicy bypass ${signPS1} $f`
|
||||
];
|
||||
|
||||
cp.spawn(innoSetupPath, args, { stdio: 'inherit' })
|
||||
cp.spawn(innoSetupPath, args, { stdio: ['ignore', 'inherit', 'inherit'] })
|
||||
.on('error', cb)
|
||||
.on('exit', () => cb(null));
|
||||
}
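Roughly what the spawn above runs (definition values here are placeholders, not real product.json values):

    // Sketch: one /dKey=Value argument per definition; stdin is ignored while
    // ISCC's stdout/stderr stream straight into the build log.
    cp.spawn(innoSetupPath,
        ['build/win32/code.iss', '/dNameLong=<product.nameLong>', '/dArch=x64', '/dInstallTarget=user'],
        { stdio: ['ignore', 'inherit', 'inherit'] });
    // Passing --debug-inno on the command line additionally appends /dDebug=true.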
|
||||
|
||||
function buildWin32Setup(arch) {
|
||||
function buildWin32Setup(arch, target) {
|
||||
if (target !== 'system' && target !== 'user') {
|
||||
throw new Error('Invalid setup target');
|
||||
}
|
||||
|
||||
return cb => {
|
||||
const ia32AppId = product.win32AppId;
|
||||
const x64AppId = product.win32x64AppId;
|
||||
const ia32AppId = target === 'system' ? product.win32AppId : product.win32UserAppId;
|
||||
const x64AppId = target === 'system' ? product.win32x64AppId : product.win32x64UserAppId;
|
||||
|
||||
const sourcePath = buildPath(arch);
|
||||
const outputPath = setupDir(arch, target);
|
||||
mkdirp.sync(outputPath);
|
||||
|
||||
const originalProductJsonPath = path.join(sourcePath, 'resources/app/product.json');
|
||||
const productJsonPath = path.join(outputPath, 'product.json');
|
||||
const productJson = JSON.parse(fs.readFileSync(originalProductJsonPath, 'utf8'));
|
||||
productJson['target'] = target;
|
||||
fs.writeFileSync(productJsonPath, JSON.stringify(productJson, undefined, '\t'));
|
||||
|
||||
const definitions = {
|
||||
NameLong: product.nameLong,
|
||||
@@ -53,35 +79,42 @@ function buildWin32Setup(arch) {
|
||||
DirName: product.win32DirName,
|
||||
Version: pkg.version,
|
||||
RawVersion: pkg.version.replace(/-\w+$/, ''),
|
||||
NameVersion: product.win32NameVersion,
|
||||
NameVersion: product.win32NameVersion + (target === 'user' ? ' (User)' : ''),
|
||||
ExeBasename: product.nameShort,
|
||||
RegValueName: product.win32RegValueName,
|
||||
ShellNameShort: product.win32ShellNameShort,
|
||||
AppMutex: product.win32MutexName,
|
||||
Arch: arch,
|
||||
AppId: arch === 'ia32' ? ia32AppId : x64AppId,
|
||||
IncompatibleAppId: arch === 'ia32' ? x64AppId : ia32AppId,
|
||||
IncompatibleTargetAppId: arch === 'ia32' ? product.win32AppId : product.win32x64AppId,
|
||||
IncompatibleArchAppId: arch === 'ia32' ? x64AppId : ia32AppId,
|
||||
AppUserId: product.win32AppUserModelId,
|
||||
ArchitecturesAllowed: arch === 'ia32' ? '' : 'x64',
|
||||
ArchitecturesInstallIn64BitMode: arch === 'ia32' ? '' : 'x64',
|
||||
SourceDir: buildPath(arch),
|
||||
SourceDir: sourcePath,
|
||||
RepoDir: repoPath,
|
||||
OutputDir: setupDir(arch)
|
||||
OutputDir: outputPath,
|
||||
InstallTarget: target,
|
||||
ProductJsonPath: productJsonPath
|
||||
};
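For orientation, a hypothetical filled-in definitions object for an x64 per-user build (every value is a placeholder; only the field names come from the code above):

    const exampleDefs = {
        NameVersion: '<win32NameVersion> (User)',  // user setups get the ' (User)' suffix
        Arch: 'x64',
        AppId: '<win32x64UserAppId>',              // the user target switches to the *UserAppId ids
        InstallTarget: 'user',
        OutputDir: '<repo>/.build/win32-x64/user-setup',
        ProductJsonPath: '<repo>/.build/win32-x64/user-setup/product.json'
    };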
|
||||
|
||||
packageInnoSetup(issPath, { definitions }, cb);
|
||||
};
|
||||
}
|
||||
|
||||
gulp.task('clean-vscode-win32-ia32-setup', util.rimraf(setupDir('ia32')));
|
||||
gulp.task('vscode-win32-ia32-setup', ['clean-vscode-win32-ia32-setup'], buildWin32Setup('ia32'));
|
||||
function defineWin32SetupTasks(arch, target) {
|
||||
gulp.task(`clean-vscode-win32-${arch}-${target}-setup`, util.rimraf(setupDir(arch, target)));
|
||||
gulp.task(`vscode-win32-${arch}-${target}-setup`, [`clean-vscode-win32-${arch}-${target}-setup`], buildWin32Setup(arch, target));
|
||||
}
|
||||
|
||||
gulp.task('clean-vscode-win32-x64-setup', util.rimraf(setupDir('x64')));
|
||||
gulp.task('vscode-win32-x64-setup', ['clean-vscode-win32-x64-setup'], buildWin32Setup('x64'));
|
||||
defineWin32SetupTasks('ia32', 'system');
|
||||
defineWin32SetupTasks('x64', 'system');
|
||||
defineWin32SetupTasks('ia32', 'user');
|
||||
defineWin32SetupTasks('x64', 'user');
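Spelled out, the x64 per-user pair that the helper above registers is equivalent to:

    gulp.task('clean-vscode-win32-x64-user-setup', util.rimraf(setupDir('x64', 'user')));
    gulp.task('vscode-win32-x64-user-setup',
        ['clean-vscode-win32-x64-user-setup'],
        buildWin32Setup('x64', 'user'));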
|
||||
|
||||
function archiveWin32Setup(arch) {
|
||||
return cb => {
|
||||
const args = ['a', '-tzip', zipPath(arch), '.', '-r'];
|
||||
const args = ['a', '-tzip', zipPath(arch), '-x!CodeSignSummary*.md', '.', '-r'];
|
||||
|
||||
cp.spawn(_7z, args, { stdio: 'inherit', cwd: buildPath(arch) })
|
||||
.on('error', cb)
|
||||
|
||||
@@ -17,7 +17,6 @@ const ext = require('./extensions');
|
||||
const util = require('gulp-util');
|
||||
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const builtInExtensions = require('../builtInExtensions.json');
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
|
||||
|
||||
@@ -18,18 +18,21 @@ var _ = require("underscore");
|
||||
var monacodts = require("../monaco/api");
|
||||
var fs = require("fs");
|
||||
var reporter = reporter_1.createReporter();
|
||||
var rootDir = path.join(__dirname, '../../src');
|
||||
var options = require('../../src/tsconfig.json').compilerOptions;
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
function getTypeScriptCompilerOptions(src) {
|
||||
var rootDir = path.join(__dirname, "../../" + src);
|
||||
var options = require("../../" + src + "/tsconfig.json").compilerOptions;
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||
return options;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||
function createCompile(build, emitError) {
|
||||
var opts = _.clone(options);
|
||||
function createCompile(src, build, emitError) {
|
||||
var opts = _.clone(getTypeScriptCompilerOptions(src));
|
||||
opts.inlineSources = !!build;
|
||||
opts.noFilesystemLookup = true;
|
||||
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
|
||||
@@ -51,31 +54,31 @@ function createCompile(build, emitError) {
|
||||
.pipe(sourcemaps.write('.', {
|
||||
addComment: false,
|
||||
includeContent: !!build,
|
||||
sourceRoot: options.sourceRoot
|
||||
sourceRoot: opts.sourceRoot
|
||||
}))
|
||||
.pipe(tsFilter.restore)
|
||||
.pipe(reporter.end(emitError));
|
||||
return es.duplex(input, output);
|
||||
};
|
||||
}
|
||||
function compileTask(out, build) {
|
||||
function compileTask(src, out, build) {
|
||||
return function () {
|
||||
var compile = createCompile(build, true);
|
||||
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||
var compile = createCompile(src, build, true);
|
||||
var srcPipe = es.merge(gulp.src(src + "/**", { base: "" + src }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
|
||||
return src
|
||||
return srcPipe
|
||||
.pipe(compile())
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, false));
|
||||
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
|
||||
};
|
||||
}
|
||||
exports.compileTask = compileTask;
|
||||
function watchTask(out, build) {
|
||||
return function () {
|
||||
var compile = createCompile(build);
|
||||
var compile = createCompile('src', build);
|
||||
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||
var watchSrc = watch('src/**', { base: 'src' });
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
@@ -122,6 +125,7 @@ function monacodtsTask(out, isWatch) {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
}
|
||||
else {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,19 +21,22 @@ import * as fs from 'fs';
|
||||
|
||||
const reporter = createReporter();
|
||||
|
||||
const rootDir = path.join(__dirname, '../../src');
|
||||
const options = require('../../src/tsconfig.json').compilerOptions;
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
function getTypeScriptCompilerOptions(src: string) {
|
||||
const rootDir = path.join(__dirname, `../../${src}`);
|
||||
const options = require(`../../${src}/tsconfig.json`).compilerOptions;
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||
return options;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||
|
||||
function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
|
||||
const opts = _.clone(options);
|
||||
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
|
||||
const opts = _.clone(getTypeScriptCompilerOptions(src));
|
||||
opts.inlineSources = !!build;
|
||||
opts.noFilesystemLookup = true;
|
||||
|
||||
@@ -59,7 +62,7 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
|
||||
.pipe(sourcemaps.write('.', {
|
||||
addComment: false,
|
||||
includeContent: !!build,
|
||||
sourceRoot: options.sourceRoot
|
||||
sourceRoot: opts.sourceRoot
|
||||
}))
|
||||
.pipe(tsFilter.restore)
|
||||
.pipe(reporter.end(emitError));
|
||||
@@ -68,32 +71,32 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
|
||||
};
|
||||
}
|
||||
|
||||
export function compileTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
||||
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
||||
|
||||
return function () {
|
||||
const compile = createCompile(build, true);
|
||||
const compile = createCompile(src, build, true);
|
||||
|
||||
const src = es.merge(
|
||||
gulp.src('src/**', { base: 'src' }),
|
||||
const srcPipe = es.merge(
|
||||
gulp.src(`${src}/**`, { base: `${src}` }),
|
||||
gulp.src('node_modules/typescript/lib/lib.d.ts'),
|
||||
);
|
||||
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
|
||||
|
||||
return src
|
||||
return srcPipe
|
||||
.pipe(compile())
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, false));
|
||||
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
|
||||
};
|
||||
}
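Usage sketch: with the added src parameter the same factory now serves both trees. The second registration mirrors the gulpfile.editor change earlier in this diff; the first task name is illustrative only:

    // Workbench build: compiles src/** and keeps the monaco.d.ts up-to-date check.
    gulp.task('compile-client', compileTask('src', 'out', false));
    // Extracted-editor build: compiles out-editor-src/** and skips monacodtsTask (src !== 'src').
    gulp.task('compile-editor-build', ['clean-editor-build', 'extract-editor-src'],
        compileTask('out-editor-src', 'out-editor-build', true));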
|
||||
|
||||
export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
||||
|
||||
return function () {
|
||||
const compile = createCompile(build);
|
||||
const compile = createCompile('src', build);
|
||||
|
||||
const src = es.merge(
|
||||
gulp.src('src/**', { base: 'src' }),
|
||||
@@ -150,6 +153,7 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
|
||||
if (isWatch) {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
} else {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -44,6 +44,7 @@ function error(err) {
|
||||
var baseHeaders = {
|
||||
'X-Market-Client-Id': 'VSCode Build',
|
||||
'User-Agent': 'VSCode Build',
|
||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||
};
|
||||
function fromMarketplace(extensionName, version) {
|
||||
var filterType = 7;
|
||||
|
||||
@@ -49,6 +49,7 @@ function error(err: any): Stream {
|
||||
const baseHeaders = {
|
||||
'X-Market-Client-Id': 'VSCode Build',
|
||||
'User-Agent': 'VSCode Build',
|
||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||
};
|
||||
|
||||
export function fromMarketplace(extensionName: string, version: string): Stream {
|
||||
|
||||
@@ -34,6 +34,10 @@
|
||||
"name": "vs/workbench/parts/codeEditor",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/comments",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/debug",
|
||||
"project": "vscode-workbench"
|
||||
@@ -74,6 +78,10 @@
|
||||
"name": "vs/workbench/parts/logs",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/navigation",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/output",
|
||||
"project": "vscode-workbench"
|
||||
@@ -146,10 +154,18 @@
|
||||
"name": "vs/workbench/parts/welcome",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/outline",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/actions",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/bulkEdit",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/configuration",
|
||||
"project": "vscode-workbench"
|
||||
@@ -219,4 +235,4 @@
|
||||
"project": "vscode-preferences"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,7 +17,6 @@ var concat = require("gulp-concat");
|
||||
var VinylFile = require("vinyl");
|
||||
var bundle = require("./bundle");
|
||||
var util = require("./util");
|
||||
var i18n = require("./i18n");
|
||||
var gulpUtil = require("gulp-util");
|
||||
var flatmap = require("gulp-flatmap");
|
||||
var pump = require("pump");
|
||||
@@ -40,19 +39,19 @@ function loaderConfig(emptyPaths) {
|
||||
}
|
||||
exports.loaderConfig = loaderConfig;
|
||||
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
|
||||
function loader(bundledFileHeader, bundleLoader) {
|
||||
function loader(src, bundledFileHeader, bundleLoader) {
|
||||
var sources = [
|
||||
'out-build/vs/loader.js'
|
||||
src + "/vs/loader.js"
|
||||
];
|
||||
if (bundleLoader) {
|
||||
sources = sources.concat([
|
||||
'out-build/vs/css.js',
|
||||
'out-build/vs/nls.js'
|
||||
src + "/vs/css.js",
|
||||
src + "/vs/nls.js"
|
||||
]);
|
||||
}
|
||||
var isFirst = true;
|
||||
return (gulp
|
||||
.src(sources, { base: 'out-build' })
|
||||
.src(sources, { base: "" + src })
|
||||
.pipe(es.through(function (data) {
|
||||
if (isFirst) {
|
||||
isFirst = false;
|
||||
@@ -74,7 +73,7 @@ function loader(bundledFileHeader, bundleLoader) {
|
||||
return f;
|
||||
})));
|
||||
}
|
||||
function toConcatStream(bundledFileHeader, sources, dest) {
|
||||
function toConcatStream(src, bundledFileHeader, sources, dest) {
|
||||
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
|
||||
// If a bundle ends up including in any of the sources our copyright, then
|
||||
// insert a fake source at the beginning of each bundle with our copyright
|
||||
@@ -94,7 +93,7 @@ function toConcatStream(bundledFileHeader, sources, dest) {
|
||||
}
|
||||
var treatedSources = sources.map(function (source) {
|
||||
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
|
||||
var base = source.path ? root + '/out-build' : '';
|
||||
var base = source.path ? root + ("/" + src) : '';
|
||||
return new VinylFile({
|
||||
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
|
||||
base: base,
|
||||
@@ -105,12 +104,13 @@ function toConcatStream(bundledFileHeader, sources, dest) {
|
||||
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
||||
.pipe(concat(dest));
|
||||
}
|
||||
function toBundleStream(bundledFileHeader, bundles) {
|
||||
function toBundleStream(src, bundledFileHeader, bundles) {
|
||||
return es.merge(bundles.map(function (bundle) {
|
||||
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
|
||||
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
|
||||
}));
|
||||
}
|
||||
function optimizeTask(opts) {
|
||||
var src = opts.src;
|
||||
var entryPoints = opts.entryPoints;
|
||||
var otherSources = opts.otherSources;
|
||||
var resources = opts.resources;
|
||||
@@ -126,7 +126,7 @@ function optimizeTask(opts) {
|
||||
if (err) {
|
||||
return bundlesStream.emit('error', JSON.stringify(err));
|
||||
}
|
||||
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
|
||||
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
||||
// Remove css inlined resources
|
||||
var filteredResources = resources.slice();
|
||||
result.cssInlinedResources.forEach(function (resource) {
|
||||
@@ -135,7 +135,7 @@ function optimizeTask(opts) {
|
||||
}
|
||||
filteredResources.push('!' + resource);
|
||||
});
|
||||
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
|
||||
gulp.src(filteredResources, { base: "" + src }).pipe(resourcesStream);
|
||||
var bundleInfoArray = [];
|
||||
if (opts.bundleInfo) {
|
||||
bundleInfoArray.push(new VinylFile({
|
||||
@@ -148,9 +148,9 @@ function optimizeTask(opts) {
|
||||
});
|
||||
var otherSourcesStream = es.through();
|
||||
var otherSourcesStreamArr = [];
|
||||
gulp.src(otherSources, { base: 'out-build' })
|
||||
gulp.src(otherSources, { base: "" + src })
|
||||
.pipe(es.through(function (data) {
|
||||
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
|
||||
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
|
||||
}, function () {
|
||||
if (!otherSourcesStreamArr.length) {
|
||||
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
|
||||
@@ -159,16 +159,12 @@ function optimizeTask(opts) {
|
||||
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
|
||||
}
|
||||
}));
|
||||
var result = es.merge(loader(bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
|
||||
var result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
|
||||
return result
|
||||
.pipe(sourcemaps.write('./', {
|
||||
sourceRoot: null,
|
||||
addComment: true,
|
||||
includeContent: true
|
||||
}))
|
||||
.pipe(i18n.processNlsFiles({
|
||||
fileHeader: bundledFileHeader,
|
||||
languages: opts.languages
|
||||
}))
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
|
||||
@@ -18,11 +18,11 @@ import * as concat from 'gulp-concat';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import * as util from './util';
import * as i18n from './i18n';
import * as gulpUtil from 'gulp-util';
import * as flatmap from 'gulp-flatmap';
import * as pump from 'pump';
import * as sm from 'source-map';
import { Language } from './i18n';

const REPO_ROOT_PATH = path.join(__dirname, '../..');

@@ -52,21 +52,21 @@ declare class FileSourceMap extends VinylFile {
public sourceMap: sm.RawSourceMap;
}

function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
function loader(src: string, bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
let sources = [
'out-build/vs/loader.js'
`${src}/vs/loader.js`
];
if (bundleLoader) {
sources = sources.concat([
'out-build/vs/css.js',
'out-build/vs/nls.js'
`${src}/vs/css.js`,
`${src}/vs/nls.js`
]);
}

let isFirst = true;
return (
gulp
.src(sources, { base: 'out-build' })
.src(sources, { base: `${src}` })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
@@ -89,7 +89,7 @@ function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWr
);
}

function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);

// If a bundle ends up including in any of the sources our copyright, then
@@ -112,7 +112,7 @@ function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest

const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + '/out-build' : '';
const base = source.path ? root + `/${src}` : '';

return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
@@ -126,13 +126,17 @@ function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest
.pipe(concat(dest));
}

function toBundleStream(bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
function toBundleStream(src:string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
}));
}

export interface IOptimizeTaskOpts {
/**
* The folder to read files from.
*/
src: string;
/**
* (for AMD files, will get bundled and get Copyright treatment)
*/
@@ -163,11 +167,13 @@ export interface IOptimizeTaskOpts {
*/
out: string;
/**
* (languages to process)
* (out folder name)
*/
languages: i18n.Language[];
languages?: Language[];
}

export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const src = opts.src;
const entryPoints = opts.entryPoints;
const otherSources = opts.otherSources;
const resources = opts.resources;
@@ -184,7 +190,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }

toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);

// Remove css inlined resources
const filteredResources = resources.slice();
@@ -194,7 +200,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
gulp.src(filteredResources, { base: `${src}` }).pipe(resourcesStream);

const bundleInfoArray: VinylFile[] = [];
if (opts.bundleInfo) {
@@ -210,9 +216,9 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
const otherSourcesStream = es.through();
const otherSourcesStreamArr: NodeJS.ReadWriteStream[] = [];

gulp.src(otherSources, { base: 'out-build' })
gulp.src(otherSources, { base: `${src}` })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
@@ -222,7 +228,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
}));

const result = es.merge(
loader(bundledFileHeader, bundleLoader),
loader(src, bundledFileHeader, bundleLoader),
bundlesStream,
otherSourcesStream,
resourcesStream,
@@ -235,10 +241,6 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
addComment: true,
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}))
.pipe(gulp.dest(out));
};
}

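The hunks above thread a new `src` option through build/lib/optimize: `loader`, `toConcatStream`, `toBundleStream` and `optimizeTask` now receive the source folder as a parameter instead of hard-coding 'out-build'. A minimal sketch of how a gulpfile might call the reworked task follows; the call sites themselves are not part of this diff, so the task name, folder names and option values are illustrative assumptions only.

// Illustrative only: wiring the reworked optimizeTask from a gulpfile (hypothetical).
// The point being shown is that the source folder ('out-build', 'out-editor', ...) is
// now passed in through opts.src instead of being hard-coded inside optimize.ts.
import * as gulp from 'gulp';
import * as optimize from './lib/optimize';

gulp.task('optimize-editor', optimize.optimizeTask({
	src: 'out-editor',            // hypothetical source folder
	entryPoints: [],              // bundle entry points, elided in this sketch
	otherSources: [],
	resources: [],
	loaderConfig: {},             // shape elided
	bundleInfo: false,
	out: 'out-editor-min',
	languages: []                 // optional after this change
}));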
@@ -7,9 +7,93 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var ts = require("typescript");
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var tss = require("./treeshaking");
|
||||
var REPO_ROOT = path.join(__dirname, '../../');
|
||||
var SRC_DIR = path.join(REPO_ROOT, 'src');
|
||||
var OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
|
||||
var dirCache = {};
|
||||
function writeFile(filePath, contents) {
|
||||
function ensureDirs(dirPath) {
|
||||
if (dirCache[dirPath]) {
|
||||
return;
|
||||
}
|
||||
dirCache[dirPath] = true;
|
||||
ensureDirs(path.dirname(dirPath));
|
||||
if (fs.existsSync(dirPath)) {
|
||||
return;
|
||||
}
|
||||
fs.mkdirSync(dirPath);
|
||||
}
|
||||
ensureDirs(path.dirname(filePath));
|
||||
fs.writeFileSync(filePath, contents);
|
||||
}
|
||||
function extractEditor(options) {
|
||||
var result = tss.shake(options);
|
||||
for (var fileName in result) {
|
||||
if (result.hasOwnProperty(fileName)) {
|
||||
writeFile(path.join(options.destRoot, fileName), result[fileName]);
|
||||
}
|
||||
}
|
||||
var copied = {};
|
||||
var copyFile = function (fileName) {
|
||||
if (copied[fileName]) {
|
||||
return;
|
||||
}
|
||||
copied[fileName] = true;
|
||||
var srcPath = path.join(options.sourcesRoot, fileName);
|
||||
var dstPath = path.join(options.destRoot, fileName);
|
||||
writeFile(dstPath, fs.readFileSync(srcPath));
|
||||
};
|
||||
var writeOutputFile = function (fileName, contents) {
|
||||
writeFile(path.join(options.destRoot, fileName), contents);
|
||||
};
|
||||
for (var fileName in result) {
|
||||
if (result.hasOwnProperty(fileName)) {
|
||||
var fileContents = result[fileName];
|
||||
var info = ts.preProcessFile(fileContents);
|
||||
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
|
||||
var importedFileName = info.importedFiles[i].fileName;
|
||||
var importedFilePath = void 0;
|
||||
if (/^vs\/css!/.test(importedFileName)) {
|
||||
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
|
||||
}
|
||||
else {
|
||||
importedFilePath = importedFileName;
|
||||
}
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) {
|
||||
importedFilePath = path.join(path.dirname(fileName), importedFilePath);
|
||||
}
|
||||
if (/\.css$/.test(importedFilePath)) {
|
||||
transportCSS(importedFilePath, copyFile, writeOutputFile);
|
||||
}
|
||||
else {
|
||||
if (fs.existsSync(path.join(options.sourcesRoot, importedFilePath + '.js'))) {
|
||||
copyFile(importedFilePath + '.js');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
var tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
|
||||
tsConfig.compilerOptions.noUnusedLocals = false;
|
||||
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
|
||||
[
|
||||
'vs/css.build.js',
|
||||
'vs/css.d.ts',
|
||||
'vs/css.js',
|
||||
'vs/loader.js',
|
||||
'vs/monaco.d.ts',
|
||||
'vs/nls.build.js',
|
||||
'vs/nls.d.ts',
|
||||
'vs/nls.js',
|
||||
'vs/nls.mock.ts',
|
||||
'typings/lib.ie11_safe_es6.d.ts',
|
||||
'typings/thenable.d.ts',
|
||||
'typings/es6-promise.d.ts',
|
||||
'typings/require.d.ts',
|
||||
].forEach(copyFile);
|
||||
}
|
||||
exports.extractEditor = extractEditor;
|
||||
function createESMSourcesAndResources(options) {
|
||||
var OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
|
||||
var OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
|
||||
@@ -94,7 +178,7 @@ function createESMSourcesAndResources(options) {
|
||||
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
|
||||
while (queue.length > 0) {
|
||||
var module_1 = queue.shift();
|
||||
if (transportCSS(options, module_1, enqueue, write)) {
|
||||
if (transportCSS(module_1, enqueue, write)) {
|
||||
continue;
|
||||
}
|
||||
if (transportResource(options, module_1, enqueue, write)) {
|
||||
@@ -171,7 +255,7 @@ function createESMSourcesAndResources(options) {
|
||||
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
|
||||
}
|
||||
exports.createESMSourcesAndResources = createESMSourcesAndResources;
|
||||
function transportCSS(options, module, enqueue, write) {
|
||||
function transportCSS(module, enqueue, write) {
|
||||
if (!/\.css/.test(module)) {
|
||||
return false;
|
||||
}
|
||||
@@ -179,10 +263,10 @@ function transportCSS(options, module, enqueue, write) {
|
||||
var fileContents = fs.readFileSync(filename).toString();
|
||||
var inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
|
||||
var inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
|
||||
var newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
|
||||
var newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
|
||||
write(module, newContents);
|
||||
return true;
|
||||
function _rewriteOrInlineUrls(originalFileFSPath, contents, forceBase64, inlineByteLimit) {
|
||||
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
|
||||
return _replaceURL(contents, function (url) {
|
||||
var imagePath = path.join(path.dirname(module), url);
|
||||
var fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
|
||||
|
||||
@@ -6,11 +6,101 @@
|
||||
import * as ts from 'typescript';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as tss from './treeshaking';
|
||||
|
||||
const REPO_ROOT = path.join(__dirname, '../../');
|
||||
const SRC_DIR = path.join(REPO_ROOT, 'src');
|
||||
const OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
|
||||
|
||||
let dirCache: { [dir: string]: boolean; } = {};
|
||||
|
||||
function writeFile(filePath: string, contents: Buffer | string): void {
|
||||
function ensureDirs(dirPath: string): void {
|
||||
if (dirCache[dirPath]) {
|
||||
return;
|
||||
}
|
||||
dirCache[dirPath] = true;
|
||||
|
||||
ensureDirs(path.dirname(dirPath));
|
||||
if (fs.existsSync(dirPath)) {
|
||||
return;
|
||||
}
|
||||
fs.mkdirSync(dirPath);
|
||||
}
|
||||
ensureDirs(path.dirname(filePath));
|
||||
fs.writeFileSync(filePath, contents);
|
||||
}
|
||||
|
||||
export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string }): void {
|
||||
let result = tss.shake(options);
|
||||
for (let fileName in result) {
|
||||
if (result.hasOwnProperty(fileName)) {
|
||||
writeFile(path.join(options.destRoot, fileName), result[fileName]);
|
||||
}
|
||||
}
|
||||
let copied: { [fileName:string]: boolean; } = {};
|
||||
const copyFile = (fileName: string) => {
|
||||
if (copied[fileName]) {
|
||||
return;
|
||||
}
|
||||
copied[fileName] = true;
|
||||
const srcPath = path.join(options.sourcesRoot, fileName);
|
||||
const dstPath = path.join(options.destRoot, fileName);
|
||||
writeFile(dstPath, fs.readFileSync(srcPath));
|
||||
};
|
||||
const writeOutputFile = (fileName: string, contents: string) => {
|
||||
writeFile(path.join(options.destRoot, fileName), contents);
|
||||
};
|
||||
for (let fileName in result) {
|
||||
if (result.hasOwnProperty(fileName)) {
|
||||
const fileContents = result[fileName];
|
||||
const info = ts.preProcessFile(fileContents);
|
||||
|
||||
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
|
||||
const importedFileName = info.importedFiles[i].fileName;
|
||||
|
||||
let importedFilePath: string;
|
||||
if (/^vs\/css!/.test(importedFileName)) {
|
||||
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
|
||||
} else {
|
||||
importedFilePath = importedFileName;
|
||||
}
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importedFilePath)) {
|
||||
importedFilePath = path.join(path.dirname(fileName), importedFilePath);
|
||||
}
|
||||
|
||||
if (/\.css$/.test(importedFilePath)) {
|
||||
transportCSS(importedFilePath, copyFile, writeOutputFile);
|
||||
} else {
|
||||
if (fs.existsSync(path.join(options.sourcesRoot, importedFilePath + '.js'))) {
|
||||
copyFile(importedFilePath + '.js');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
|
||||
tsConfig.compilerOptions.noUnusedLocals = false;
|
||||
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
|
||||
|
||||
[
|
||||
'vs/css.build.js',
|
||||
'vs/css.d.ts',
|
||||
'vs/css.js',
|
||||
'vs/loader.js',
|
||||
'vs/monaco.d.ts',
|
||||
'vs/nls.build.js',
|
||||
'vs/nls.d.ts',
|
||||
'vs/nls.js',
|
||||
'vs/nls.mock.ts',
|
||||
'typings/lib.ie11_safe_es6.d.ts',
|
||||
'typings/thenable.d.ts',
|
||||
'typings/es6-promise.d.ts',
|
||||
'typings/require.d.ts',
|
||||
].forEach(copyFile);
|
||||
}
|
||||
|
||||
export interface IOptions {
|
||||
entryPoints: string[];
|
||||
outFolder: string;
|
||||
@@ -111,7 +201,7 @@ export function createESMSourcesAndResources(options: IOptions): void {
|
||||
|
||||
while (queue.length > 0) {
|
||||
const module = queue.shift();
|
||||
if (transportCSS(options, module, enqueue, write)) {
|
||||
if (transportCSS(module, enqueue, write)) {
|
||||
continue;
|
||||
}
|
||||
if (transportResource(options, module, enqueue, write)) {
|
||||
@@ -198,7 +288,7 @@ export function createESMSourcesAndResources(options: IOptions): void {
|
||||
|
||||
}
|
||||
|
||||
function transportCSS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
|
||||
function transportCSS(module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
|
||||
|
||||
if (!/\.css/.test(module)) {
|
||||
return false;
|
||||
@@ -209,11 +299,11 @@ function transportCSS(options: IOptions, module: string, enqueue: (module: strin
|
||||
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
|
||||
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
|
||||
|
||||
const newContents = _rewriteOrInlineUrls(filename, fileContents, inlineResources === 'base64', inlineResourcesLimit);
|
||||
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
|
||||
write(module, newContents);
|
||||
return true;
|
||||
|
||||
function _rewriteOrInlineUrls(originalFileFSPath: string, contents: string, forceBase64: boolean, inlineByteLimit: number): string {
|
||||
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
|
||||
return _replaceURL(contents, (url) => {
|
||||
let imagePath = path.join(path.dirname(module), url);
|
||||
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
|
||||
|
||||
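Both versions of `extractEditor` above rely on `ts.preProcessFile` to list a file's imports without a full parse, and then copy the referenced `.css` and `.js` files next to the shaken sources. A self-contained sketch of that scanning step (the file contents here are made up for illustration):

// Sketch of the import-scanning step used by extractEditor above.
import * as ts from 'typescript';

const contents = [
	"import { A } from './a';",
	"import 'vs/css!./widget';"
].join('\n');

// preProcessFile returns the raw module specifiers without type checking the file
const info = ts.preProcessFile(contents);
for (const ref of info.importedFiles) {
	// ref.fileName is the specifier as written, e.g. './a' or 'vs/css!./widget'
	console.log(ref.fileName);
}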
@@ -1,56 +0,0 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var assert = require("assert");
|
||||
var util = require("../util");
|
||||
function getMockTagExists(tags) {
|
||||
return function (tag) { return tags.indexOf(tag) >= 0; };
|
||||
}
|
||||
suite('util tests', function () {
|
||||
test('getPreviousVersion - patch', function () {
|
||||
assert.equal(util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])), '1.2.2');
|
||||
});
|
||||
test('getPreviousVersion - patch invalid', function () {
|
||||
try {
|
||||
util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0']));
|
||||
}
|
||||
catch (e) {
|
||||
// expected
|
||||
return;
|
||||
}
|
||||
throw new Error('Expected an exception');
|
||||
});
|
||||
test('getPreviousVersion - minor', function () {
|
||||
assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])), '1.1.3');
|
||||
assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])), '1.1.0');
|
||||
});
|
||||
test('getPreviousVersion - minor gap', function () {
|
||||
assert.equal(util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])), '1.1.1');
|
||||
});
|
||||
test('getPreviousVersion - minor invalid', function () {
|
||||
try {
|
||||
util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0']));
|
||||
}
|
||||
catch (e) {
|
||||
// expected
|
||||
return;
|
||||
}
|
||||
throw new Error('Expected an exception');
|
||||
});
|
||||
test('getPreviousVersion - major', function () {
|
||||
assert.equal(util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])), '1.2.2');
|
||||
});
|
||||
test('getPreviousVersion - major invalid', function () {
|
||||
try {
|
||||
util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0']));
|
||||
}
|
||||
catch (e) {
|
||||
// expected
|
||||
return;
|
||||
}
|
||||
throw new Error('Expected an exception');
|
||||
});
|
||||
});
|
||||
@@ -1,79 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import assert = require('assert');
|
||||
import util = require('../util');
|
||||
|
||||
function getMockTagExists(tags: string[]) {
|
||||
return (tag: string) => tags.indexOf(tag) >= 0;
|
||||
}
|
||||
|
||||
suite('util tests', () => {
|
||||
test('getPreviousVersion - patch', () => {
|
||||
assert.equal(
|
||||
util.getPreviousVersion('1.2.3', getMockTagExists(['1.2.2', '1.2.1', '1.2.0', '1.1.0'])),
|
||||
'1.2.2'
|
||||
);
|
||||
});
|
||||
|
||||
test('getPreviousVersion - patch invalid', () => {
|
||||
try {
|
||||
util.getPreviousVersion('1.2.2', getMockTagExists(['1.2.0', '1.1.0']));
|
||||
} catch (e) {
|
||||
// expected
|
||||
return;
|
||||
}
|
||||
|
||||
throw new Error('Expected an exception');
|
||||
});
|
||||
|
||||
test('getPreviousVersion - minor', () => {
|
||||
assert.equal(
|
||||
util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.2', '1.1.3'])),
|
||||
'1.1.3'
|
||||
);
|
||||
|
||||
assert.equal(
|
||||
util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.0.0'])),
|
||||
'1.1.0'
|
||||
);
|
||||
});
|
||||
|
||||
test('getPreviousVersion - minor gap', () => {
|
||||
assert.equal(
|
||||
util.getPreviousVersion('1.2.0', getMockTagExists(['1.1.0', '1.1.1', '1.1.3'])),
|
||||
'1.1.1'
|
||||
);
|
||||
});
|
||||
|
||||
test('getPreviousVersion - minor invalid', () => {
|
||||
try {
|
||||
util.getPreviousVersion('1.2.0', getMockTagExists(['1.0.0']));
|
||||
} catch (e) {
|
||||
// expected
|
||||
return;
|
||||
}
|
||||
|
||||
throw new Error('Expected an exception');
|
||||
});
|
||||
|
||||
test('getPreviousVersion - major', () => {
|
||||
assert.equal(
|
||||
util.getPreviousVersion('2.0.0', getMockTagExists(['1.0.0', '1.1.0', '1.2.0', '1.2.1', '1.2.2'])),
|
||||
'1.2.2'
|
||||
);
|
||||
});
|
||||
|
||||
test('getPreviousVersion - major invalid', () => {
|
||||
try {
|
||||
util.getPreviousVersion('3.0.0', getMockTagExists(['1.0.0']));
|
||||
} catch (e) {
|
||||
// expected
|
||||
return;
|
||||
}
|
||||
|
||||
throw new Error('Expected an exception');
|
||||
});
|
||||
});
|
||||
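The treeshaking module added below marks TypeScript AST nodes with a three-colour scheme (White = not yet reachable, Gray = class or interface member kept only if its declaring type survives, Black = keep) and then emits only the Black parts of each file. The toy sketch below illustrates just the colouring idea on a made-up node type; it is not the real algorithm, which resolves symbols through the TypeScript language service.

// Toy illustration of the White/Gray/Black marking idea; FakeNode is invented for the sketch.
const enum ToyColor { White = 0, Gray = 1, Black = 2 }

interface ToyNode {
	color: ToyColor;
	children: ToyNode[];
}

function markReachable(entryPoints: ToyNode[]): void {
	const blackQueue: ToyNode[] = [];
	for (const entry of entryPoints) {
		entry.color = ToyColor.Black;
		blackQueue.push(entry);
	}
	while (blackQueue.length > 0) {
		const node = blackQueue.shift()!;
		for (const child of node.children) {
			if (child.color === ToyColor.White) {
				// the real implementation decides what to enqueue per-symbol,
				// via the language service, rather than by simple child traversal
				child.color = ToyColor.Black;
				blackQueue.push(child);
			}
		}
	}
}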
build/lib/treeshaking.js (new file, 682 lines)
@@ -0,0 +1,682 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var ts = require("typescript");
|
||||
var TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
|
||||
var ShakeLevel;
|
||||
(function (ShakeLevel) {
|
||||
ShakeLevel[ShakeLevel["Files"] = 0] = "Files";
|
||||
ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile";
|
||||
ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers";
|
||||
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
|
||||
function shake(options) {
|
||||
var languageService = createTypeScriptLanguageService(options);
|
||||
markNodes(languageService, options);
|
||||
return generateResult(languageService, options.shakeLevel);
|
||||
}
|
||||
exports.shake = shake;
|
||||
//#region Discovery, LanguageService & Setup
|
||||
function createTypeScriptLanguageService(options) {
|
||||
// Discover referenced files
|
||||
var FILES = discoverAndReadFiles(options);
|
||||
// Add fake usage files
|
||||
options.inlineEntryPoints.forEach(function (inlineEntryPoint, index) {
|
||||
FILES["inlineEntryPoint:" + index + ".ts"] = inlineEntryPoint;
|
||||
});
|
||||
// Resolve libs
|
||||
var RESOLVED_LIBS = {};
|
||||
options.libs.forEach(function (filename) {
|
||||
var filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
|
||||
RESOLVED_LIBS["defaultLib:" + filename] = fs.readFileSync(filepath).toString();
|
||||
});
|
||||
var host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
|
||||
return ts.createLanguageService(host);
|
||||
}
|
||||
/**
|
||||
* Read imports and follow them until all files have been handled
|
||||
*/
|
||||
function discoverAndReadFiles(options) {
|
||||
var FILES = {};
|
||||
var in_queue = Object.create(null);
|
||||
var queue = [];
|
||||
var enqueue = function (moduleId) {
|
||||
if (in_queue[moduleId]) {
|
||||
return;
|
||||
}
|
||||
in_queue[moduleId] = true;
|
||||
queue.push(moduleId);
|
||||
};
|
||||
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
|
||||
while (queue.length > 0) {
|
||||
var moduleId = queue.shift();
|
||||
var dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
|
||||
if (fs.existsSync(dts_filename)) {
|
||||
var dts_filecontents = fs.readFileSync(dts_filename).toString();
|
||||
FILES[moduleId + '.d.ts'] = dts_filecontents;
|
||||
continue;
|
||||
}
|
||||
var ts_filename = void 0;
|
||||
if (options.redirects[moduleId]) {
|
||||
ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
|
||||
}
|
||||
else {
|
||||
ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
|
||||
}
|
||||
var ts_filecontents = fs.readFileSync(ts_filename).toString();
|
||||
var info = ts.preProcessFile(ts_filecontents);
|
||||
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
|
||||
var importedFileName = info.importedFiles[i].fileName;
|
||||
if (options.importIgnorePattern.test(importedFileName)) {
|
||||
// Ignore vs/css! imports
|
||||
continue;
|
||||
}
|
||||
var importedModuleId = importedFileName;
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
|
||||
importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
|
||||
}
|
||||
enqueue(importedModuleId);
|
||||
}
|
||||
FILES[moduleId + '.ts'] = ts_filecontents;
|
||||
}
|
||||
return FILES;
|
||||
}
|
||||
/**
|
||||
* A TypeScript language service host
|
||||
*/
|
||||
var TypeScriptLanguageServiceHost = /** @class */ (function () {
|
||||
function TypeScriptLanguageServiceHost(libs, files, compilerOptions) {
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
}
|
||||
// --- language service host ---------------
|
||||
TypeScriptLanguageServiceHost.prototype.getCompilationSettings = function () {
|
||||
return this._compilerOptions;
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptFileNames = function () {
|
||||
return ([]
|
||||
.concat(Object.keys(this._libs))
|
||||
.concat(Object.keys(this._files)));
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptVersion = function (fileName) {
|
||||
return '1';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getProjectVersion = function () {
|
||||
return '1';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptSnapshot = function (fileName) {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
}
|
||||
else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
}
|
||||
else {
|
||||
return ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptKind = function (fileName) {
|
||||
return ts.ScriptKind.TS;
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getCurrentDirectory = function () {
|
||||
return '';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getDefaultLibFileName = function (options) {
|
||||
return 'defaultLib:lib.d.ts';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.isDefaultLibFileName = function (fileName) {
|
||||
return fileName === this.getDefaultLibFileName(this._compilerOptions);
|
||||
};
|
||||
return TypeScriptLanguageServiceHost;
|
||||
}());
|
||||
//#endregion
|
||||
//#region Tree Shaking
|
||||
var NodeColor;
|
||||
(function (NodeColor) {
|
||||
NodeColor[NodeColor["White"] = 0] = "White";
|
||||
NodeColor[NodeColor["Gray"] = 1] = "Gray";
|
||||
NodeColor[NodeColor["Black"] = 2] = "Black";
|
||||
})(NodeColor || (NodeColor = {}));
|
||||
function getColor(node) {
|
||||
return node.$$$color || 0 /* White */;
|
||||
}
|
||||
function setColor(node, color) {
|
||||
node.$$$color = color;
|
||||
}
|
||||
function nodeOrParentIsBlack(node) {
|
||||
while (node) {
|
||||
var color = getColor(node);
|
||||
if (color === 2 /* Black */) {
|
||||
return true;
|
||||
}
|
||||
node = node.parent;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function nodeOrChildIsBlack(node) {
|
||||
if (getColor(node) === 2 /* Black */) {
|
||||
return true;
|
||||
}
|
||||
for (var _i = 0, _a = node.getChildren(); _i < _a.length; _i++) {
|
||||
var child = _a[_i];
|
||||
if (nodeOrChildIsBlack(child)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function markNodes(languageService, options) {
|
||||
var program = languageService.getProgram();
|
||||
if (options.shakeLevel === 0 /* Files */) {
|
||||
// Mark all source files Black
|
||||
program.getSourceFiles().forEach(function (sourceFile) {
|
||||
setColor(sourceFile, 2 /* Black */);
|
||||
});
|
||||
return;
|
||||
}
|
||||
var black_queue = [];
|
||||
var gray_queue = [];
|
||||
var sourceFilesLoaded = {};
|
||||
function enqueueTopLevelModuleStatements(sourceFile) {
|
||||
sourceFile.forEachChild(function (node) {
|
||||
if (ts.isImportDeclaration(node)) {
|
||||
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
|
||||
setColor(node, 2 /* Black */);
|
||||
enqueueImport(node, node.moduleSpecifier.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (ts.isExportDeclaration(node)) {
|
||||
if (ts.isStringLiteral(node.moduleSpecifier)) {
|
||||
setColor(node, 2 /* Black */);
|
||||
enqueueImport(node, node.moduleSpecifier.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
if (ts.isExpressionStatement(node)
|
||||
|| ts.isIfStatement(node)
|
||||
|| ts.isIterationStatement(node, true)
|
||||
|| ts.isExportAssignment(node)) {
|
||||
enqueue_black(node);
|
||||
}
|
||||
if (ts.isImportEqualsDeclaration(node)) {
|
||||
if (/export/.test(node.getFullText(sourceFile))) {
|
||||
// e.g. "export import Severity = BaseSeverity;"
|
||||
enqueue_black(node);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
function enqueue_gray(node) {
|
||||
if (nodeOrParentIsBlack(node) || getColor(node) === 1 /* Gray */) {
|
||||
return;
|
||||
}
|
||||
setColor(node, 1 /* Gray */);
|
||||
gray_queue.push(node);
|
||||
}
|
||||
function enqueue_black(node) {
|
||||
var previousColor = getColor(node);
|
||||
if (previousColor === 2 /* Black */) {
|
||||
return;
|
||||
}
|
||||
if (previousColor === 1 /* Gray */) {
|
||||
// remove from gray queue
|
||||
gray_queue.splice(gray_queue.indexOf(node), 1);
|
||||
setColor(node, 0 /* White */);
|
||||
// add to black queue
|
||||
enqueue_black(node);
|
||||
// // move from one queue to the other
|
||||
// black_queue.push(node);
|
||||
// setColor(node, NodeColor.Black);
|
||||
return;
|
||||
}
|
||||
if (nodeOrParentIsBlack(node)) {
|
||||
return;
|
||||
}
|
||||
var fileName = node.getSourceFile().fileName;
|
||||
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
|
||||
setColor(node, 2 /* Black */);
|
||||
return;
|
||||
}
|
||||
var sourceFile = node.getSourceFile();
|
||||
if (!sourceFilesLoaded[sourceFile.fileName]) {
|
||||
sourceFilesLoaded[sourceFile.fileName] = true;
|
||||
enqueueTopLevelModuleStatements(sourceFile);
|
||||
}
|
||||
if (ts.isSourceFile(node)) {
|
||||
return;
|
||||
}
|
||||
setColor(node, 2 /* Black */);
|
||||
black_queue.push(node);
|
||||
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
|
||||
var references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
|
||||
if (references) {
|
||||
for (var i = 0, len = references.length; i < len; i++) {
|
||||
var reference = references[i];
|
||||
var referenceSourceFile = program.getSourceFile(reference.fileName);
|
||||
var referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
|
||||
if (ts.isMethodDeclaration(referenceNode.parent)
|
||||
|| ts.isPropertyDeclaration(referenceNode.parent)
|
||||
|| ts.isGetAccessor(referenceNode.parent)
|
||||
|| ts.isSetAccessor(referenceNode.parent)) {
|
||||
enqueue_gray(referenceNode.parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
function enqueueFile(filename) {
|
||||
var sourceFile = program.getSourceFile(filename);
|
||||
if (!sourceFile) {
|
||||
console.warn("Cannot find source file " + filename);
|
||||
return;
|
||||
}
|
||||
enqueue_black(sourceFile);
|
||||
}
|
||||
function enqueueImport(node, importText) {
|
||||
if (options.importIgnorePattern.test(importText)) {
|
||||
// this import should be ignored
|
||||
return;
|
||||
}
|
||||
var nodeSourceFile = node.getSourceFile();
|
||||
var fullPath;
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
|
||||
fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
|
||||
}
|
||||
else {
|
||||
fullPath = importText + '.ts';
|
||||
}
|
||||
enqueueFile(fullPath);
|
||||
}
|
||||
options.entryPoints.forEach(function (moduleId) { return enqueueFile(moduleId + '.ts'); });
|
||||
// Add fake usage files
|
||||
options.inlineEntryPoints.forEach(function (_, index) { return enqueueFile("inlineEntryPoint:" + index + ".ts"); });
|
||||
var step = 0;
|
||||
var checker = program.getTypeChecker();
|
||||
var _loop_1 = function () {
|
||||
++step;
|
||||
var node = void 0;
|
||||
if (step % 100 === 0) {
|
||||
console.log(step + "/" + (step + black_queue.length + gray_queue.length) + " (" + black_queue.length + ", " + gray_queue.length + ")");
|
||||
}
|
||||
if (black_queue.length === 0) {
|
||||
for (var i = 0; i < gray_queue.length; i++) {
|
||||
var node_1 = gray_queue[i];
|
||||
var nodeParent = node_1.parent;
|
||||
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
|
||||
gray_queue.splice(i, 1);
|
||||
black_queue.push(node_1);
|
||||
setColor(node_1, 2 /* Black */);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (black_queue.length > 0) {
|
||||
node = black_queue.shift();
|
||||
}
|
||||
else {
|
||||
return "break";
|
||||
}
|
||||
var nodeSourceFile = node.getSourceFile();
|
||||
var loop = function (node) {
|
||||
var _a = getRealNodeSymbol(checker, node), symbol = _a[0], symbolImportNode = _a[1];
|
||||
if (symbolImportNode) {
|
||||
setColor(symbolImportNode, 2 /* Black */);
|
||||
}
|
||||
if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
|
||||
for (var i = 0, len = symbol.declarations.length; i < len; i++) {
|
||||
var declaration = symbol.declarations[i];
|
||||
if (ts.isSourceFile(declaration)) {
|
||||
// Do not enqueue full source files
|
||||
// (they can be the declaration of a module import)
|
||||
continue;
|
||||
}
|
||||
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
|
||||
enqueue_black(declaration.name);
|
||||
for (var j = 0; j < declaration.members.length; j++) {
|
||||
var member = declaration.members[j];
|
||||
var memberName = member.name ? member.name.getText() : null;
|
||||
if (ts.isConstructorDeclaration(member)
|
||||
|| ts.isConstructSignatureDeclaration(member)
|
||||
|| ts.isIndexSignatureDeclaration(member)
|
||||
|| ts.isCallSignatureDeclaration(member)
|
||||
|| memberName === 'toJSON'
|
||||
|| memberName === 'toString'
|
||||
|| memberName === 'dispose' // TODO: keeping all `dispose` methods
|
||||
) {
|
||||
enqueue_black(member);
|
||||
}
|
||||
}
|
||||
// queue the heritage clauses
|
||||
if (declaration.heritageClauses) {
|
||||
for (var _i = 0, _b = declaration.heritageClauses; _i < _b.length; _i++) {
|
||||
var heritageClause = _b[_i];
|
||||
enqueue_black(heritageClause);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
enqueue_black(declaration);
|
||||
}
|
||||
}
|
||||
}
|
||||
node.forEachChild(loop);
|
||||
};
|
||||
node.forEachChild(loop);
|
||||
};
|
||||
while (black_queue.length > 0 || gray_queue.length > 0) {
|
||||
var state_1 = _loop_1();
|
||||
if (state_1 === "break")
|
||||
break;
|
||||
}
|
||||
}
|
||||
function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
|
||||
for (var i = 0, len = symbol.declarations.length; i < len; i++) {
|
||||
var declaration = symbol.declarations[i];
|
||||
var declarationSourceFile = declaration.getSourceFile();
|
||||
if (nodeSourceFile === declarationSourceFile) {
|
||||
if (declaration.pos <= node.pos && node.end <= declaration.end) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function generateResult(languageService, shakeLevel) {
|
||||
var program = languageService.getProgram();
|
||||
var result = {};
|
||||
var writeFile = function (filePath, contents) {
|
||||
result[filePath] = contents;
|
||||
};
|
||||
program.getSourceFiles().forEach(function (sourceFile) {
|
||||
var fileName = sourceFile.fileName;
|
||||
if (/^defaultLib:/.test(fileName)) {
|
||||
return;
|
||||
}
|
||||
var destination = fileName;
|
||||
if (/\.d\.ts$/.test(fileName)) {
|
||||
if (nodeOrChildIsBlack(sourceFile)) {
|
||||
writeFile(destination, sourceFile.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
var text = sourceFile.text;
|
||||
var result = '';
|
||||
function keep(node) {
|
||||
result += text.substring(node.pos, node.end);
|
||||
}
|
||||
function write(data) {
|
||||
result += data;
|
||||
}
|
||||
function writeMarkedNodes(node) {
|
||||
if (getColor(node) === 2 /* Black */) {
|
||||
return keep(node);
|
||||
}
|
||||
// Always keep certain top-level statements
|
||||
if (ts.isSourceFile(node.parent)) {
|
||||
if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') {
|
||||
return keep(node);
|
||||
}
|
||||
if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
// Keep the entire import in import * as X cases
|
||||
if (ts.isImportDeclaration(node)) {
|
||||
if (node.importClause && node.importClause.namedBindings) {
|
||||
if (ts.isNamespaceImport(node.importClause.namedBindings)) {
|
||||
if (getColor(node.importClause.namedBindings) === 2 /* Black */) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
else {
|
||||
var survivingImports = [];
|
||||
for (var i = 0; i < node.importClause.namedBindings.elements.length; i++) {
|
||||
var importNode = node.importClause.namedBindings.elements[i];
|
||||
if (getColor(importNode) === 2 /* Black */) {
|
||||
survivingImports.push(importNode.getFullText(sourceFile));
|
||||
}
|
||||
}
|
||||
var leadingTriviaWidth = node.getLeadingTriviaWidth();
|
||||
var leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
|
||||
if (survivingImports.length > 0) {
|
||||
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
|
||||
return write(leadingTrivia + "import " + node.importClause.name.text + ", {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
|
||||
}
|
||||
return write(leadingTrivia + "import {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
|
||||
}
|
||||
else {
|
||||
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
|
||||
return write(leadingTrivia + "import " + node.importClause.name.text + " from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
|
||||
var toWrite = node.getFullText();
|
||||
for (var i = node.members.length - 1; i >= 0; i--) {
|
||||
var member = node.members[i];
|
||||
if (getColor(member) === 2 /* Black */) {
|
||||
// keep method
|
||||
continue;
|
||||
}
|
||||
if (/^_(.*)Brand$/.test(member.name.getText())) {
|
||||
// TODO: keep all members ending with `Brand`...
|
||||
continue;
|
||||
}
|
||||
var pos = member.pos - node.pos;
|
||||
var end = member.end - node.pos;
|
||||
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
|
||||
}
|
||||
return write(toWrite);
|
||||
}
|
||||
if (ts.isFunctionDeclaration(node)) {
|
||||
// Do not go inside functions if they haven't been marked
|
||||
return;
|
||||
}
|
||||
node.forEachChild(writeMarkedNodes);
|
||||
}
|
||||
if (getColor(sourceFile) !== 2 /* Black */) {
|
||||
if (!nodeOrChildIsBlack(sourceFile)) {
|
||||
// none of the elements are reachable => don't write this file at all!
|
||||
return;
|
||||
}
|
||||
sourceFile.forEachChild(writeMarkedNodes);
|
||||
result += sourceFile.endOfFileToken.getFullText(sourceFile);
|
||||
}
|
||||
else {
|
||||
result = text;
|
||||
}
|
||||
writeFile(destination, result);
|
||||
});
|
||||
return result;
|
||||
}
|
||||
//#endregion
|
||||
//#region Utils
|
||||
/**
|
||||
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
|
||||
*/
|
||||
function getRealNodeSymbol(checker, node) {
|
||||
/**
|
||||
* Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
|
||||
*/
|
||||
/* @internal */
|
||||
function getContainingObjectLiteralElement(node) {
|
||||
switch (node.kind) {
|
||||
case ts.SyntaxKind.StringLiteral:
|
||||
case ts.SyntaxKind.NumericLiteral:
|
||||
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
|
||||
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
|
||||
}
|
||||
// falls through
|
||||
case ts.SyntaxKind.Identifier:
|
||||
return ts.isObjectLiteralElement(node.parent) &&
|
||||
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
|
||||
node.parent.name === node ? node.parent : undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
function getPropertySymbolsFromType(type, propName) {
|
||||
function getTextOfPropertyName(name) {
|
||||
function isStringOrNumericLiteral(node) {
|
||||
var kind = node.kind;
|
||||
return kind === ts.SyntaxKind.StringLiteral
|
||||
|| kind === ts.SyntaxKind.NumericLiteral;
|
||||
}
|
||||
switch (name.kind) {
|
||||
case ts.SyntaxKind.Identifier:
|
||||
return name.text;
|
||||
case ts.SyntaxKind.StringLiteral:
|
||||
case ts.SyntaxKind.NumericLiteral:
|
||||
return name.text;
|
||||
case ts.SyntaxKind.ComputedPropertyName:
|
||||
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined;
|
||||
}
|
||||
}
|
||||
var name = getTextOfPropertyName(propName);
|
||||
if (name && type) {
|
||||
var result = [];
|
||||
var symbol_1 = type.getProperty(name);
|
||||
if (type.flags & ts.TypeFlags.Union) {
|
||||
for (var _i = 0, _a = type.types; _i < _a.length; _i++) {
|
||||
var t = _a[_i];
|
||||
var symbol_2 = t.getProperty(name);
|
||||
if (symbol_2) {
|
||||
result.push(symbol_2);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
if (symbol_1) {
|
||||
result.push(symbol_1);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
function getPropertySymbolsFromContextualType(typeChecker, node) {
|
||||
var objectLiteral = node.parent;
|
||||
var contextualType = typeChecker.getContextualType(objectLiteral);
|
||||
return getPropertySymbolsFromType(contextualType, node.name);
|
||||
}
|
||||
// Go to the original declaration for cases:
|
||||
//
|
||||
// (1) when the aliased symbol was declared in the location(parent).
|
||||
// (2) when the aliased symbol is originating from an import.
|
||||
//
|
||||
function shouldSkipAlias(node, declaration) {
|
||||
if (node.kind !== ts.SyntaxKind.Identifier) {
|
||||
return false;
|
||||
}
|
||||
if (node.parent === declaration) {
|
||||
return true;
|
||||
}
|
||||
switch (declaration.kind) {
|
||||
case ts.SyntaxKind.ImportClause:
|
||||
case ts.SyntaxKind.ImportEqualsDeclaration:
|
||||
return true;
|
||||
case ts.SyntaxKind.ImportSpecifier:
|
||||
return declaration.parent.kind === ts.SyntaxKind.NamedImports;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (!ts.isShorthandPropertyAssignment(node)) {
|
||||
if (node.getChildCount() !== 0) {
|
||||
return [null, null];
|
||||
}
|
||||
}
|
||||
var symbol = checker.getSymbolAtLocation(node);
|
||||
var importNode = null;
|
||||
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
|
||||
var aliased = checker.getAliasedSymbol(symbol);
|
||||
if (aliased.declarations) {
|
||||
// We should mark the import as visited
|
||||
importNode = symbol.declarations[0];
|
||||
symbol = aliased;
|
||||
}
|
||||
}
|
||||
if (symbol) {
|
||||
// Because name in short-hand property assignment has two different meanings: property name and property value,
|
||||
// using go-to-definition at such position should go to the variable declaration of the property value rather than
|
||||
// go to the declaration of the property name (in this case stay at the same position). However, if go-to-definition
|
||||
// is performed at the location of property access, we would like to go to definition of the property in the short-hand
|
||||
// assignment. This case and others are handled by the following code.
|
||||
if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) {
|
||||
symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
|
||||
}
|
||||
// If the node is the name of a BindingElement within an ObjectBindingPattern instead of just returning the
|
||||
// declaration the symbol (which is itself), we should try to get to the original type of the ObjectBindingPattern
|
||||
// and return the property declaration for the referenced property.
|
||||
// For example:
|
||||
// import('./foo').then(({ b/*goto*/ar }) => undefined); => should get use to the declaration in file "./foo"
|
||||
//
|
||||
// function bar<T>(onfulfilled: (value: T) => void) { //....}
|
||||
// interface Test {
|
||||
// pr/*destination*/op1: number
|
||||
// }
|
||||
// bar<Test>(({pr/*goto*/op1})=>{});
|
||||
if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
|
||||
(node === (node.parent.propertyName || node.parent.name))) {
|
||||
var type = checker.getTypeAtLocation(node.parent.parent);
|
||||
if (type) {
|
||||
var propSymbols = getPropertySymbolsFromType(type, node);
|
||||
if (propSymbols) {
|
||||
symbol = propSymbols[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
// If the current location we want to find its definition is in an object literal, try to get the contextual type for the
|
||||
// object literal, lookup the property symbol in the contextual type, and use this for goto-definition.
|
||||
// For example
|
||||
// interface Props{
|
||||
// /*first*/prop1: number
|
||||
// prop2: boolean
|
||||
// }
|
||||
// function Foo(arg: Props) {}
|
||||
// Foo( { pr/*1*/op1: 10, prop2: false })
|
||||
var element = getContainingObjectLiteralElement(node);
|
||||
if (element && checker.getContextualType(element.parent)) {
|
||||
var propertySymbols = getPropertySymbolsFromContextualType(checker, element);
|
||||
if (propertySymbols) {
|
||||
symbol = propertySymbols[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
if (symbol && symbol.declarations) {
|
||||
return [symbol, importNode];
|
||||
}
|
||||
return [null, null];
|
||||
}
|
||||
/** Get the token whose text contains the position */
|
||||
function getTokenAtPosition(sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) {
|
||||
var current = sourceFile;
|
||||
outer: while (true) {
|
||||
// find the child that contains 'position'
|
||||
for (var _i = 0, _a = current.getChildren(); _i < _a.length; _i++) {
|
||||
var child = _a[_i];
|
||||
var start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
|
||||
if (start > position) {
|
||||
// If this child begins after position, then all subsequent children will as well.
|
||||
break;
|
||||
}
|
||||
var end = child.getEnd();
|
||||
if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
|
||||
current = child;
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
return current;
|
||||
}
|
||||
}
|
||||
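The compiled module above and its TypeScript source below expose `shake(options)`. A hypothetical caller might look like the sketch below; the real caller lives in the gulp build and is not part of this diff, so every concrete value here (entry points, libs, compiler options) is an assumption based only on the `ITreeShakingOptions` doc comments.

// Hypothetical caller of the new tree shaker; values are illustrative assumptions.
import * as path from 'path';
import * as ts from 'typescript';
import * as tss from './treeshaking';

const result = tss.shake({
	sourcesRoot: path.join(__dirname, '../../src'),
	entryPoints: ['vs/editor/editor.main'],
	inlineEntryPoints: [],
	libs: ['lib.d.ts'],
	compilerOptions: { module: ts.ModuleKind.AMD, target: ts.ScriptTarget.ES5 },
	shakeLevel: tss.ShakeLevel.ClassMembers,
	importIgnorePattern: /^vs\/css!/,
	redirects: {}
});

// result maps file names to shaken contents; extractEditor writes them to destRoot.
for (const fileName of Object.keys(result)) {
	console.log(fileName, result[fileName].length);
}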
build/lib/treeshaking.ts (new file, 817 lines)
@@ -0,0 +1,817 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as ts from 'typescript';
|
||||
|
||||
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
|
||||
|
||||
export const enum ShakeLevel {
|
||||
Files = 0,
|
||||
InnerFile = 1,
|
||||
ClassMembers = 2
|
||||
}
|
||||
|
||||
export interface ITreeShakingOptions {
|
||||
/**
|
||||
* The full path to the root where sources are.
|
||||
*/
|
||||
sourcesRoot: string;
|
||||
/**
|
||||
* Module ids.
|
||||
* e.g. `vs/editor/editor.main` or `index`
|
||||
*/
|
||||
entryPoints: string[];
|
||||
/**
|
||||
* Inline usages.
|
||||
*/
|
||||
inlineEntryPoints: string[];
|
||||
/**
|
||||
* TypeScript libs.
|
||||
* e.g. `lib.d.ts`, `lib.es2015.collection.d.ts`
|
||||
*/
|
||||
libs: string[];
|
||||
/**
|
||||
* TypeScript compiler options.
|
||||
*/
|
||||
compilerOptions: ts.CompilerOptions;
|
||||
/**
|
||||
* The shake level to perform.
|
||||
*/
|
||||
shakeLevel: ShakeLevel;
|
||||
/**
|
||||
* regex pattern to ignore certain imports e.g. `vs/css!` imports
|
||||
*/
|
||||
importIgnorePattern: RegExp;
|
||||
|
||||
redirects: { [module: string]: string; };
|
||||
}
|
||||
|
||||
export interface ITreeShakingResult {
|
||||
[file: string]: string;
|
||||
}
|
||||
|
||||
export function shake(options: ITreeShakingOptions): ITreeShakingResult {
|
||||
const languageService = createTypeScriptLanguageService(options);
|
||||
|
||||
markNodes(languageService, options);
|
||||
|
||||
return generateResult(languageService, options.shakeLevel);
|
||||
}
|
||||
|
||||
//#region Discovery, LanguageService & Setup
|
||||
function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.LanguageService {
|
||||
// Discover referenced files
|
||||
const FILES = discoverAndReadFiles(options);
|
||||
|
||||
// Add fake usage files
|
||||
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
|
||||
FILES[`inlineEntryPoint:${index}.ts`] = inlineEntryPoint;
|
||||
});
|
||||
|
||||
// Resolve libs
|
||||
const RESOLVED_LIBS: ILibMap = {};
|
||||
options.libs.forEach((filename) => {
|
||||
const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
|
||||
RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString();
|
||||
});
|
||||
|
||||
const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
|
||||
return ts.createLanguageService(host);
|
||||
}
|
||||
|
||||
/**
|
||||
* Read imports and follow them until all files have been handled
|
||||
*/
|
||||
function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
|
||||
const FILES: IFileMap = {};
|
||||
|
||||
const in_queue: { [module: string]: boolean; } = Object.create(null);
|
||||
const queue: string[] = [];
|
||||
|
||||
const enqueue = (moduleId: string) => {
|
||||
if (in_queue[moduleId]) {
|
||||
return;
|
||||
}
|
||||
in_queue[moduleId] = true;
|
||||
queue.push(moduleId);
|
||||
};
|
||||
|
||||
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
|
||||
|
||||
while (queue.length > 0) {
|
||||
const moduleId = queue.shift();
|
||||
const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
|
||||
if (fs.existsSync(dts_filename)) {
|
||||
const dts_filecontents = fs.readFileSync(dts_filename).toString();
|
||||
FILES[moduleId + '.d.ts'] = dts_filecontents;
|
||||
continue;
|
||||
}
|
||||
|
||||
let ts_filename: string;
|
||||
if (options.redirects[moduleId]) {
|
||||
ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
|
||||
} else {
|
||||
ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
|
||||
}
|
||||
const ts_filecontents = fs.readFileSync(ts_filename).toString();
|
||||
const info = ts.preProcessFile(ts_filecontents);
|
||||
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
|
||||
const importedFileName = info.importedFiles[i].fileName;
|
||||
|
||||
if (options.importIgnorePattern.test(importedFileName)) {
|
||||
// Ignore vs/css! imports
|
||||
continue;
|
||||
}
|
||||
|
||||
let importedModuleId = importedFileName;
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
|
||||
importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
|
||||
}
|
||||
enqueue(importedModuleId);
|
||||
}
|
||||
|
||||
FILES[moduleId + '.ts'] = ts_filecontents;
|
||||
}
|
||||
|
||||
return FILES;
|
||||
}
|
||||
|
||||
interface ILibMap { [libName: string]: string; }
|
||||
interface IFileMap { [fileName: string]: string; }
|
||||
|
||||
/**
|
||||
* A TypeScript language service host
|
||||
*/
|
||||
class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
|
||||
|
||||
private readonly _libs: ILibMap;
|
||||
private readonly _files: IFileMap;
|
||||
private readonly _compilerOptions: ts.CompilerOptions;
|
||||
|
||||
constructor(libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
}
|
||||
|
||||
// --- language service host ---------------
|
||||
|
||||
getCompilationSettings(): ts.CompilerOptions {
|
||||
return this._compilerOptions;
|
||||
}
|
||||
getScriptFileNames(): string[] {
|
||||
return (
|
||||
[]
|
||||
.concat(Object.keys(this._libs))
|
||||
.concat(Object.keys(this._files))
|
||||
);
|
||||
}
|
||||
getScriptVersion(fileName: string): string {
|
||||
return '1';
|
||||
}
|
||||
getProjectVersion(): string {
|
||||
return '1';
|
||||
}
|
||||
getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
} else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
} else {
|
||||
return ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
getScriptKind(fileName: string): ts.ScriptKind {
|
||||
return ts.ScriptKind.TS;
|
||||
}
|
||||
getCurrentDirectory(): string {
|
||||
return '';
|
||||
}
|
||||
getDefaultLibFileName(options: ts.CompilerOptions): string {
|
||||
return 'defaultLib:lib.d.ts';
|
||||
}
|
||||
isDefaultLibFileName(fileName: string): boolean {
|
||||
return fileName === this.getDefaultLibFileName(this._compilerOptions);
|
||||
}
|
||||
}
|
||||
//#endregion
|
||||
|
||||
//#region Tree Shaking
|
||||
|
||||
const enum NodeColor {
|
||||
White = 0,
|
||||
Gray = 1,
|
||||
Black = 2
|
||||
}
|
||||
|
||||
function getColor(node: ts.Node): NodeColor {
|
||||
return (<any>node).$$$color || NodeColor.White;
|
||||
}
|
||||
function setColor(node: ts.Node, color: NodeColor): void {
|
||||
(<any>node).$$$color = color;
|
||||
}
|
||||
function nodeOrParentIsBlack(node: ts.Node): boolean {
|
||||
while (node) {
|
||||
const color = getColor(node);
|
||||
if (color === NodeColor.Black) {
|
||||
return true;
|
||||
}
|
||||
node = node.parent;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function nodeOrChildIsBlack(node: ts.Node): boolean {
|
||||
if (getColor(node) === NodeColor.Black) {
|
||||
return true;
|
||||
}
|
||||
for (const child of node.getChildren()) {
|
||||
if (nodeOrChildIsBlack(child)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function markNodes(languageService: ts.LanguageService, options: ITreeShakingOptions) {
|
||||
const program = languageService.getProgram();
|
||||
|
||||
if (options.shakeLevel === ShakeLevel.Files) {
|
||||
// Mark all source files Black
|
||||
program.getSourceFiles().forEach((sourceFile) => {
|
||||
setColor(sourceFile, NodeColor.Black);
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const black_queue: ts.Node[] = [];
|
||||
const gray_queue: ts.Node[] = [];
|
||||
const sourceFilesLoaded: { [fileName: string]: boolean } = {};
|
||||
|
||||
function enqueueTopLevelModuleStatements(sourceFile: ts.SourceFile): void {
|
||||
|
||||
sourceFile.forEachChild((node: ts.Node) => {
|
||||
|
||||
if (ts.isImportDeclaration(node)) {
|
||||
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
|
||||
setColor(node, NodeColor.Black);
|
||||
enqueueImport(node, node.moduleSpecifier.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (ts.isExportDeclaration(node)) {
|
||||
if (ts.isStringLiteral(node.moduleSpecifier)) {
|
||||
setColor(node, NodeColor.Black);
|
||||
enqueueImport(node, node.moduleSpecifier.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (
|
||||
ts.isExpressionStatement(node)
|
||||
|| ts.isIfStatement(node)
|
||||
|| ts.isIterationStatement(node, true)
|
||||
|| ts.isExportAssignment(node)
|
||||
) {
|
||||
enqueue_black(node);
|
||||
}
|
||||
|
||||
if (ts.isImportEqualsDeclaration(node)) {
|
||||
if (/export/.test(node.getFullText(sourceFile))) {
|
||||
// e.g. "export import Severity = BaseSeverity;"
|
||||
enqueue_black(node);
|
||||
}
|
||||
}
|
||||
|
||||
});
|
||||
}
|
||||
|
||||
function enqueue_gray(node: ts.Node): void {
|
||||
if (nodeOrParentIsBlack(node) || getColor(node) === NodeColor.Gray) {
|
||||
return;
|
||||
}
|
||||
setColor(node, NodeColor.Gray);
|
||||
gray_queue.push(node);
|
||||
}
|
||||
|
||||
function enqueue_black(node: ts.Node): void {
|
||||
const previousColor = getColor(node);
|
||||
|
||||
if (previousColor === NodeColor.Black) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (previousColor === NodeColor.Gray) {
|
||||
// remove from gray queue
|
||||
gray_queue.splice(gray_queue.indexOf(node), 1);
|
||||
setColor(node, NodeColor.White);
|
||||
|
||||
// add to black queue
|
||||
enqueue_black(node);
|
||||
|
||||
// // move from one queue to the other
|
||||
// black_queue.push(node);
|
||||
// setColor(node, NodeColor.Black);
|
||||
return;
|
||||
}
|
||||
|
||||
if (nodeOrParentIsBlack(node)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const fileName = node.getSourceFile().fileName;
|
||||
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
|
||||
setColor(node, NodeColor.Black);
|
||||
return;
|
||||
}
|
||||
|
||||
const sourceFile = node.getSourceFile();
|
||||
if (!sourceFilesLoaded[sourceFile.fileName]) {
|
||||
sourceFilesLoaded[sourceFile.fileName] = true;
|
||||
enqueueTopLevelModuleStatements(sourceFile);
|
||||
}
|
||||
|
||||
if (ts.isSourceFile(node)) {
|
||||
return;
|
||||
}
|
||||
|
||||
setColor(node, NodeColor.Black);
|
||||
black_queue.push(node);
|
||||
|
||||
if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
|
||||
const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
|
||||
if (references) {
|
||||
for (let i = 0, len = references.length; i < len; i++) {
|
||||
const reference = references[i];
|
||||
const referenceSourceFile = program.getSourceFile(reference.fileName);
|
||||
const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
|
||||
if (
|
||||
ts.isMethodDeclaration(referenceNode.parent)
|
||||
|| ts.isPropertyDeclaration(referenceNode.parent)
|
||||
|| ts.isGetAccessor(referenceNode.parent)
|
||||
|| ts.isSetAccessor(referenceNode.parent)
|
||||
) {
|
||||
enqueue_gray(referenceNode.parent);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
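// Besides marking the node itself, enqueue_black loads the enclosing file's top-level statements the
// first time that file is touched and, at ShakeLevel.ClassMembers, walks the references of a member's
// name and queues any method/property/accessor declarations found there as gray candidates.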
|
||||
|
||||
function enqueueFile(filename: string): void {
|
||||
const sourceFile = program.getSourceFile(filename);
|
||||
if (!sourceFile) {
|
||||
console.warn(`Cannot find source file ${filename}`);
|
||||
return;
|
||||
}
|
||||
enqueue_black(sourceFile);
|
||||
}
|
||||
|
||||
function enqueueImport(node: ts.Node, importText: string): void {
|
||||
if (options.importIgnorePattern.test(importText)) {
|
||||
// this import should be ignored
|
||||
return;
|
||||
}
|
||||
|
||||
const nodeSourceFile = node.getSourceFile();
|
||||
let fullPath: string;
|
||||
if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
|
||||
fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
|
||||
} else {
|
||||
fullPath = importText + '.ts';
|
||||
}
|
||||
enqueueFile(fullPath);
|
||||
}
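// Module resolution here is intentionally simple: relative specifiers are resolved against the importing
// file's directory, anything else is looked up verbatim, and '.ts' is appended in both cases.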
|
||||
|
||||
options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
|
||||
// Add fake usage files
|
||||
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint:${index}.ts`));
|
||||
|
||||
let step = 0;
|
||||
|
||||
const checker = program.getTypeChecker();
|
||||
while (black_queue.length > 0 || gray_queue.length > 0) {
|
||||
++step;
|
||||
let node: ts.Node;
|
||||
|
||||
if (step % 100 === 0) {
|
||||
console.log(`${step}/${step+black_queue.length+gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
|
||||
}
|
||||
|
||||
if (black_queue.length === 0) {
|
||||
for (let i = 0; i < gray_queue.length; i++) {
|
||||
const node = gray_queue[i];
|
||||
const nodeParent = node.parent;
|
||||
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
|
||||
gray_queue.splice(i, 1);
|
||||
black_queue.push(node);
|
||||
setColor(node, NodeColor.Black);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (black_queue.length > 0) {
|
||||
node = black_queue.shift();
|
||||
} else {
|
||||
// only gray nodes remaining...
|
||||
break;
|
||||
}
|
||||
const nodeSourceFile = node.getSourceFile();
|
||||
|
||||
const loop = (node: ts.Node) => {
|
||||
const [symbol, symbolImportNode] = getRealNodeSymbol(checker, node);
|
||||
if (symbolImportNode) {
|
||||
setColor(symbolImportNode, NodeColor.Black);
|
||||
}
|
||||
|
||||
if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
|
||||
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
|
||||
const declaration = symbol.declarations[i];
|
||||
if (ts.isSourceFile(declaration)) {
|
||||
// Do not enqueue full source files
|
||||
// (they can be the declaration of a module import)
|
||||
continue;
|
||||
}
|
||||
|
||||
if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
|
||||
enqueue_black(declaration.name);
|
||||
|
||||
for (let j = 0; j < declaration.members.length; j++) {
|
||||
const member = declaration.members[j];
|
||||
const memberName = member.name ? member.name.getText() : null;
|
||||
if (
|
||||
ts.isConstructorDeclaration(member)
|
||||
|| ts.isConstructSignatureDeclaration(member)
|
||||
|| ts.isIndexSignatureDeclaration(member)
|
||||
|| ts.isCallSignatureDeclaration(member)
|
||||
|| memberName === 'toJSON'
|
||||
|| memberName === 'toString'
|
||||
|| memberName === 'dispose' // TODO: keeping all `dispose` methods
|
||||
) {
|
||||
enqueue_black(member);
|
||||
}
|
||||
}
|
||||
|
||||
// queue the heritage clauses
|
||||
if (declaration.heritageClauses) {
|
||||
for (let heritageClause of declaration.heritageClauses) {
|
||||
enqueue_black(heritageClause);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
enqueue_black(declaration);
|
||||
}
|
||||
}
|
||||
}
|
||||
node.forEachChild(loop);
|
||||
};
|
||||
node.forEachChild(loop);
|
||||
}
|
||||
}
|
||||
|
||||
function nodeIsInItsOwnDeclaration(nodeSourceFile: ts.SourceFile, node: ts.Node, symbol: ts.Symbol): boolean {
|
||||
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
|
||||
const declaration = symbol.declarations[i];
|
||||
const declarationSourceFile = declaration.getSourceFile();
|
||||
|
||||
if (nodeSourceFile === declarationSourceFile) {
|
||||
if (declaration.pos <= node.pos && node.end <= declaration.end) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
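// nodeIsInItsOwnDeclaration stops the walk from re-enqueueing a declaration when the node being resolved
// already sits inside that very declaration (e.g. a recursive reference), which would add no new information.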
|
||||
|
||||
function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult {
|
||||
const program = languageService.getProgram();
|
||||
|
||||
let result: ITreeShakingResult = {};
|
||||
const writeFile = (filePath: string, contents: string): void => {
|
||||
result[filePath] = contents;
|
||||
};
|
||||
|
||||
program.getSourceFiles().forEach((sourceFile) => {
|
||||
const fileName = sourceFile.fileName;
|
||||
if (/^defaultLib:/.test(fileName)) {
|
||||
return;
|
||||
}
|
||||
const destination = fileName;
|
||||
if (/\.d\.ts$/.test(fileName)) {
|
||||
if (nodeOrChildIsBlack(sourceFile)) {
|
||||
writeFile(destination, sourceFile.text);
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
let text = sourceFile.text;
|
||||
let result = '';
|
||||
|
||||
function keep(node: ts.Node): void {
|
||||
result += text.substring(node.pos, node.end);
|
||||
}
|
||||
function write(data: string): void {
|
||||
result += data;
|
||||
}
|
||||
|
||||
function writeMarkedNodes(node: ts.Node): void {
|
||||
if (getColor(node) === NodeColor.Black) {
|
||||
return keep(node);
|
||||
}
|
||||
|
||||
// Always keep certain top-level statements
|
||||
if (ts.isSourceFile(node.parent)) {
|
||||
if (ts.isExpressionStatement(node) && ts.isStringLiteral(node.expression) && node.expression.text === 'use strict') {
|
||||
return keep(node);
|
||||
}
|
||||
|
||||
if (ts.isVariableStatement(node) && nodeOrChildIsBlack(node)) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
|
||||
// Keep the entire import in import * as X cases
|
||||
if (ts.isImportDeclaration(node)) {
|
||||
if (node.importClause && node.importClause.namedBindings) {
|
||||
if (ts.isNamespaceImport(node.importClause.namedBindings)) {
|
||||
if (getColor(node.importClause.namedBindings) === NodeColor.Black) {
|
||||
return keep(node);
|
||||
}
|
||||
} else {
|
||||
let survivingImports: string[] = [];
|
||||
for (let i = 0; i < node.importClause.namedBindings.elements.length; i++) {
|
||||
const importNode = node.importClause.namedBindings.elements[i];
|
||||
if (getColor(importNode) === NodeColor.Black) {
|
||||
survivingImports.push(importNode.getFullText(sourceFile));
|
||||
}
|
||||
}
|
||||
const leadingTriviaWidth = node.getLeadingTriviaWidth();
|
||||
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
|
||||
if (survivingImports.length > 0) {
|
||||
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
|
||||
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
|
||||
}
|
||||
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
|
||||
} else {
|
||||
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
|
||||
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
|
||||
return keep(node);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
|
||||
let toWrite = node.getFullText();
|
||||
for (let i = node.members.length - 1; i >= 0; i--) {
|
||||
const member = node.members[i];
|
||||
if (getColor(member) === NodeColor.Black) {
|
||||
// keep method
|
||||
continue;
|
||||
}
|
||||
if (/^_(.*)Brand$/.test(member.name.getText())) {
|
||||
// TODO: keep all members ending with `Brand`...
|
||||
continue;
|
||||
}
|
||||
|
||||
let pos = member.pos - node.pos;
|
||||
let end = member.end - node.pos;
|
||||
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
|
||||
}
|
||||
return write(toWrite);
|
||||
}
|
||||
|
||||
if (ts.isFunctionDeclaration(node)) {
|
||||
// Do not go inside functions if they haven't been marked
|
||||
return;
|
||||
}
|
||||
|
||||
node.forEachChild(writeMarkedNodes);
|
||||
}
|
||||
|
||||
if (getColor(sourceFile) !== NodeColor.Black) {
|
||||
if (!nodeOrChildIsBlack(sourceFile)) {
|
||||
// none of the elements are reachable => don't write this file at all!
|
||||
return;
|
||||
}
|
||||
sourceFile.forEachChild(writeMarkedNodes);
|
||||
result += sourceFile.endOfFileToken.getFullText(sourceFile);
|
||||
} else {
|
||||
result = text;
|
||||
}
|
||||
|
||||
writeFile(destination, result);
|
||||
});
|
||||
|
||||
return result;
|
||||
}
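// generateResult maps each surviving file path to its emitted text: .d.ts files are copied whole when any
// node in them is black, files whose root is black are copied verbatim, and everything else is reassembled
// from its black nodes via writeMarkedNodes.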
|
||||
|
||||
//#endregion
|
||||
|
||||
//#region Utils
|
||||
|
||||
/**
|
||||
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
|
||||
*/
|
||||
function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol, ts.Declaration] {
|
||||
/**
|
||||
* Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
|
||||
*/
|
||||
/* @internal */
|
||||
function getContainingObjectLiteralElement(node: ts.Node): ts.ObjectLiteralElement | undefined {
|
||||
switch (node.kind) {
|
||||
case ts.SyntaxKind.StringLiteral:
|
||||
case ts.SyntaxKind.NumericLiteral:
|
||||
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
|
||||
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
|
||||
}
|
||||
// falls through
|
||||
case ts.SyntaxKind.Identifier:
|
||||
return ts.isObjectLiteralElement(node.parent) &&
|
||||
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
|
||||
node.parent.name === node ? node.parent : undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getPropertySymbolsFromType(type: ts.Type, propName: ts.PropertyName) {
|
||||
function getTextOfPropertyName(name: ts.PropertyName): string {
|
||||
|
||||
function isStringOrNumericLiteral(node: ts.Node): node is ts.StringLiteral | ts.NumericLiteral {
|
||||
const kind = node.kind;
|
||||
return kind === ts.SyntaxKind.StringLiteral
|
||||
|| kind === ts.SyntaxKind.NumericLiteral;
|
||||
}
|
||||
|
||||
switch (name.kind) {
|
||||
case ts.SyntaxKind.Identifier:
|
||||
return name.text;
|
||||
case ts.SyntaxKind.StringLiteral:
|
||||
case ts.SyntaxKind.NumericLiteral:
|
||||
return name.text;
|
||||
case ts.SyntaxKind.ComputedPropertyName:
|
||||
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined!;
|
||||
}
|
||||
}
|
||||
|
||||
const name = getTextOfPropertyName(propName);
|
||||
if (name && type) {
|
||||
const result: ts.Symbol[] = [];
|
||||
const symbol = type.getProperty(name);
|
||||
if (type.flags & ts.TypeFlags.Union) {
|
||||
for (const t of (<ts.UnionType>type).types) {
|
||||
const symbol = t.getProperty(name);
|
||||
if (symbol) {
|
||||
result.push(symbol);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
if (symbol) {
|
||||
result.push(symbol);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function getPropertySymbolsFromContextualType(typeChecker: ts.TypeChecker, node: ts.ObjectLiteralElement): ts.Symbol[] {
|
||||
const objectLiteral = <ts.ObjectLiteralExpression | ts.JsxAttributes>node.parent;
|
||||
const contextualType = typeChecker.getContextualType(objectLiteral)!;
|
||||
return getPropertySymbolsFromType(contextualType, node.name!)!;
|
||||
}
|
||||
|
||||
// Go to the original declaration for cases:
|
||||
//
|
||||
// (1) when the aliased symbol was declared in the location(parent).
|
||||
// (2) when the aliased symbol is originating from an import.
|
||||
//
|
||||
function shouldSkipAlias(node: ts.Node, declaration: ts.Node): boolean {
|
||||
if (node.kind !== ts.SyntaxKind.Identifier) {
|
||||
return false;
|
||||
}
|
||||
if (node.parent === declaration) {
|
||||
return true;
|
||||
}
|
||||
switch (declaration.kind) {
|
||||
case ts.SyntaxKind.ImportClause:
|
||||
case ts.SyntaxKind.ImportEqualsDeclaration:
|
||||
return true;
|
||||
case ts.SyntaxKind.ImportSpecifier:
|
||||
return declaration.parent.kind === ts.SyntaxKind.NamedImports;
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if (!ts.isShorthandPropertyAssignment(node)) {
|
||||
if (node.getChildCount() !== 0) {
|
||||
return [null, null];
|
||||
}
|
||||
}
|
||||
|
||||
let symbol = checker.getSymbolAtLocation(node);
|
||||
let importNode: ts.Declaration = null;
|
||||
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
|
||||
const aliased = checker.getAliasedSymbol(symbol);
|
||||
if (aliased.declarations) {
|
||||
// We should mark the import as visited
|
||||
importNode = symbol.declarations[0];
|
||||
symbol = aliased;
|
||||
}
|
||||
}
|
||||
|
||||
if (symbol) {
|
||||
// Because the name in a short-hand property assignment has two different meanings (property name and property
// value), go-to-definition at such a position should go to the variable declaration of the property value rather
// than to the declaration of the property name (i.e. stay at the same position). However, if go-to-definition is
// performed at the location of a property access, we would like to go to the definition of the property in the
// short-hand assignment. This case and others are handled by the following code.
|
||||
if (node.parent.kind === ts.SyntaxKind.ShorthandPropertyAssignment) {
|
||||
symbol = checker.getShorthandAssignmentValueSymbol(symbol.valueDeclaration);
|
||||
}
|
||||
|
||||
// If the node is the name of a BindingElement within an ObjectBindingPattern, then instead of just returning
// the declaration of the symbol (which is the binding element itself), we should try to get to the original type
// of the ObjectBindingPattern and return the property declaration for the referenced property.
|
||||
// For example:
|
||||
// import('./foo').then(({ b/*goto*/ar }) => undefined); => should get us to the declaration in file "./foo"
|
||||
//
|
||||
// function bar<T>(onfulfilled: (value: T) => void) { //....}
|
||||
// interface Test {
|
||||
// pr/*destination*/op1: number
|
||||
// }
|
||||
// bar<Test>(({pr/*goto*/op1})=>{});
|
||||
if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
|
||||
(node === (node.parent.propertyName || node.parent.name))) {
|
||||
const type = checker.getTypeAtLocation(node.parent.parent);
|
||||
if (type) {
|
||||
const propSymbols = getPropertySymbolsFromType(type, node);
|
||||
if (propSymbols) {
|
||||
symbol = propSymbols[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If the current location we want to find its definition is in an object literal, try to get the contextual type for the
|
||||
// object literal, lookup the property symbol in the contextual type, and use this for goto-definition.
|
||||
// For example
|
||||
// interface Props{
|
||||
// /*first*/prop1: number
|
||||
// prop2: boolean
|
||||
// }
|
||||
// function Foo(arg: Props) {}
|
||||
// Foo( { pr/*1*/op1: 10, prop2: false })
|
||||
const element = getContainingObjectLiteralElement(node);
|
||||
if (element && checker.getContextualType(element.parent as ts.Expression)) {
|
||||
const propertySymbols = getPropertySymbolsFromContextualType(checker, element);
|
||||
if (propertySymbols) {
|
||||
symbol = propertySymbols[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (symbol && symbol.declarations) {
|
||||
return [symbol, importNode];
|
||||
}
|
||||
|
||||
return [null, null];
|
||||
}
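// getRealNodeSymbol returns [null, null] when the node cannot resolve to a usable symbol; when the symbol
// was reached through an import alias, the second tuple element is that import declaration so the caller
// can keep the import itself alive.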
|
||||
|
||||
/** Get the token whose text contains the position */
|
||||
function getTokenAtPosition(sourceFile: ts.SourceFile, position: number, allowPositionInLeadingTrivia: boolean, includeEndPosition: boolean): ts.Node {
|
||||
let current: ts.Node = sourceFile;
|
||||
outer: while (true) {
|
||||
// find the child that contains 'position'
|
||||
for (const child of current.getChildren()) {
|
||||
const start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
|
||||
if (start > position) {
|
||||
// If this child begins after position, then all subsequent children will as well.
|
||||
break;
|
||||
}
|
||||
|
||||
const end = child.getEnd();
|
||||
if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
|
||||
current = child;
|
||||
continue outer;
|
||||
}
|
||||
}
|
||||
|
||||
return current;
|
||||
}
|
||||
}
|
||||
|
||||
//#endregion
|
||||
73
build/lib/tslint/noStandaloneEditorRule.js
Normal file
@@ -0,0 +1,73 @@
|
||||
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
||||
return function (d, b) {
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var ts = require("typescript");
|
||||
var Lint = require("tslint");
|
||||
var path_1 = require("path");
|
||||
var Rule = /** @class */ (function (_super) {
|
||||
__extends(Rule, _super);
|
||||
function Rule() {
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
Rule.prototype.apply = function (sourceFile) {
|
||||
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
|
||||
// the vs/editor folder is allowed to use the standalone editor
|
||||
return [];
|
||||
}
|
||||
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
|
||||
};
|
||||
return Rule;
|
||||
}(Lint.Rules.AbstractRule));
|
||||
exports.Rule = Rule;
|
||||
var NoStandaloneEditorRuleWalker = /** @class */ (function (_super) {
|
||||
__extends(NoStandaloneEditorRuleWalker, _super);
|
||||
function NoStandaloneEditorRuleWalker(file, opts) {
|
||||
return _super.call(this, file, opts) || this;
|
||||
}
|
||||
NoStandaloneEditorRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
|
||||
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
|
||||
this._validateImport(node.moduleReference.expression.getText(), node);
|
||||
}
|
||||
};
|
||||
NoStandaloneEditorRuleWalker.prototype.visitImportDeclaration = function (node) {
|
||||
this._validateImport(node.moduleSpecifier.getText(), node);
|
||||
};
|
||||
NoStandaloneEditorRuleWalker.prototype.visitCallExpression = function (node) {
|
||||
_super.prototype.visitCallExpression.call(this, node);
|
||||
// import('foo') statements inside the code
|
||||
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
|
||||
var path = node.arguments[0];
|
||||
this._validateImport(path.getText(), node);
|
||||
}
|
||||
};
|
||||
NoStandaloneEditorRuleWalker.prototype._validateImport = function (path, node) {
|
||||
// remove quotes
|
||||
path = path.slice(1, -1);
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = path_1.join(this.getSourceFile().fileName, path);
|
||||
}
|
||||
if (/vs(\/|\\)editor(\/|\\)standalone/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)) {
|
||||
// {{SQL CARBON EDIT}}
|
||||
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
|
||||
}
|
||||
};
|
||||
return NoStandaloneEditorRuleWalker;
|
||||
}(Lint.RuleWalker));
|
||||
66
build/lib/tslint/noStandaloneEditorRule.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as ts from 'typescript';
|
||||
import * as Lint from 'tslint';
|
||||
import { join } from 'path';
|
||||
|
||||
export class Rule extends Lint.Rules.AbstractRule {
|
||||
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
|
||||
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
|
||||
// the vs/editor folder is allowed to use the standalone editor
|
||||
return [];
|
||||
}
|
||||
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
|
||||
}
|
||||
}
|
||||
|
||||
class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
|
||||
|
||||
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
|
||||
super(file, opts);
|
||||
}
|
||||
|
||||
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
|
||||
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
|
||||
this._validateImport(node.moduleReference.expression.getText(), node);
|
||||
}
|
||||
}
|
||||
|
||||
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
|
||||
this._validateImport(node.moduleSpecifier.getText(), node);
|
||||
}
|
||||
|
||||
protected visitCallExpression(node: ts.CallExpression): void {
|
||||
super.visitCallExpression(node);
|
||||
|
||||
// import('foo') statements inside the code
|
||||
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
|
||||
const [path] = node.arguments;
|
||||
this._validateImport(path.getText(), node);
|
||||
}
|
||||
}
|
||||
|
||||
private _validateImport(path: string, node: ts.Node): void {
|
||||
// remove quotes
|
||||
path = path.slice(1, -1);
|
||||
|
||||
// resolve relative paths
|
||||
if (path[0] === '.') {
|
||||
path = join(this.getSourceFile().fileName, path);
|
||||
}
|
||||
|
||||
if (
|
||||
/vs(\/|\\)editor(\/|\\)standalone/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(path)
|
||||
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(path)
|
||||
) {
|
||||
// {{SQL CARBON EDIT}}
|
||||
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -14,7 +14,6 @@ var fs = require("fs");
|
||||
var _rimraf = require("rimraf");
|
||||
var git = require("./git");
|
||||
var VinylFile = require("vinyl");
|
||||
var cp = require("child_process");
|
||||
var NoCancellationToken = { isCancellationRequested: function () { return false; } };
|
||||
function incremental(streamProvider, initial, supportsCancellation) {
|
||||
var input = es.through();
|
||||
@@ -211,62 +210,6 @@ function filter(fn) {
|
||||
return result;
|
||||
}
|
||||
exports.filter = filter;
|
||||
function tagExists(tagName) {
|
||||
try {
|
||||
cp.execSync("git rev-parse " + tagName, { stdio: 'ignore' });
|
||||
return true;
|
||||
}
|
||||
catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
/**
|
||||
* Returns the version previous to the given version. Throws if a git tag for that version doesn't exist.
|
||||
* Given 1.17.2, return 1.17.1
|
||||
* 1.18.0 => 1.17.2. (or the highest 1.17.x)
|
||||
* 2.0.0 => 1.18.0 (or the highest 1.x)
|
||||
*/
|
||||
function getPreviousVersion(versionStr, _tagExists) {
|
||||
if (_tagExists === void 0) { _tagExists = tagExists; }
|
||||
function getLatestTagFromBase(semverArr, componentToTest) {
|
||||
var baseVersion = semverArr.join('.');
|
||||
if (!_tagExists(baseVersion)) {
|
||||
throw new Error('Failed to find git tag for base version, ' + baseVersion);
|
||||
}
|
||||
var goodTag;
|
||||
do {
|
||||
goodTag = semverArr.join('.');
|
||||
semverArr[componentToTest]++;
|
||||
} while (_tagExists(semverArr.join('.')));
|
||||
return goodTag;
|
||||
}
|
||||
var semverArr = versionStringToNumberArray(versionStr);
|
||||
if (semverArr[2] > 0) {
|
||||
semverArr[2]--;
|
||||
var previous = semverArr.join('.');
|
||||
if (!_tagExists(previous)) {
|
||||
throw new Error('Failed to find git tag for previous version, ' + previous);
|
||||
}
|
||||
return previous;
|
||||
}
|
||||
else if (semverArr[1] > 0) {
|
||||
semverArr[1]--;
|
||||
return getLatestTagFromBase(semverArr, 2);
|
||||
}
|
||||
else {
|
||||
semverArr[0]--;
|
||||
// Find 1.x.0 for latest x
|
||||
var latestMinorVersion = getLatestTagFromBase(semverArr, 1);
|
||||
// Find 1.x.y for latest y
|
||||
return getLatestTagFromBase(versionStringToNumberArray(latestMinorVersion), 2);
|
||||
}
|
||||
}
|
||||
exports.getPreviousVersion = getPreviousVersion;
|
||||
function versionStringToNumberArray(versionStr) {
|
||||
return versionStr
|
||||
.split('.')
|
||||
.map(function (s) { return parseInt(s); });
|
||||
}
|
||||
function versionStringToNumber(versionStr) {
|
||||
var semverRegex = /(\d+)\.(\d+)\.(\d+)/;
|
||||
var match = versionStr.match(semverRegex);
|
||||
|
||||
@@ -17,7 +17,6 @@ import * as git from './git';
|
||||
import * as VinylFile from 'vinyl';
|
||||
import { ThroughStream } from 'through';
|
||||
import * as sm from 'source-map';
|
||||
import * as cp from 'child_process';
|
||||
|
||||
export interface ICancellationToken {
|
||||
isCancellationRequested(): boolean;
|
||||
@@ -271,66 +270,6 @@ export function filter(fn: (data: any) => boolean): FilterStream {
|
||||
return result;
|
||||
}
|
||||
|
||||
function tagExists(tagName: string): boolean {
|
||||
try {
|
||||
cp.execSync(`git rev-parse ${tagName}`, { stdio: 'ignore' });
|
||||
return true;
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the version previous to the given version. Throws if a git tag for that version doesn't exist.
|
||||
* Given 1.17.2, return 1.17.1
|
||||
* 1.18.0 => 1.17.2. (or the highest 1.17.x)
|
||||
* 2.0.0 => 1.18.0 (or the highest 1.x)
|
||||
*/
|
||||
export function getPreviousVersion(versionStr: string, _tagExists = tagExists) {
|
||||
function getLatestTagFromBase(semverArr: number[], componentToTest: number): string {
|
||||
const baseVersion = semverArr.join('.');
|
||||
if (!_tagExists(baseVersion)) {
|
||||
throw new Error('Failed to find git tag for base version, ' + baseVersion);
|
||||
}
|
||||
|
||||
let goodTag;
|
||||
do {
|
||||
goodTag = semverArr.join('.');
|
||||
semverArr[componentToTest]++;
|
||||
} while (_tagExists(semverArr.join('.')));
|
||||
|
||||
return goodTag;
|
||||
}
|
||||
|
||||
const semverArr = versionStringToNumberArray(versionStr);
|
||||
if (semverArr[2] > 0) {
|
||||
semverArr[2]--;
|
||||
const previous = semverArr.join('.');
|
||||
if (!_tagExists(previous)) {
|
||||
throw new Error('Failed to find git tag for previous version, ' + previous);
|
||||
}
|
||||
|
||||
return previous;
|
||||
} else if (semverArr[1] > 0) {
|
||||
semverArr[1]--;
|
||||
return getLatestTagFromBase(semverArr, 2);
|
||||
} else {
|
||||
semverArr[0]--;
|
||||
|
||||
// Find 1.x.0 for latest x
|
||||
const latestMinorVersion = getLatestTagFromBase(semverArr, 1);
|
||||
|
||||
// Find 1.x.y for latest y
|
||||
return getLatestTagFromBase(versionStringToNumberArray(latestMinorVersion), 2);
|
||||
}
|
||||
}
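// Example: with tags 1.17.0 through 1.17.5 and 1.18.0 present, getPreviousVersion('1.18.0') starts from
// 1.17.0 and keeps incrementing the patch component while a tag exists, returning the last hit (1.17.5).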
|
||||
|
||||
function versionStringToNumberArray(versionStr: string): number[] {
|
||||
return versionStr
|
||||
.split('.')
|
||||
.map(s => parseInt(s));
|
||||
}
|
||||
|
||||
export function versionStringToNumber(versionStr: string) {
|
||||
const semverRegex = /(\d+)\.(\d+)\.(\d+)/;
|
||||
const match = versionStr.match(semverRegex);
|
||||
|
||||
@@ -134,7 +134,25 @@ function getTopLevelDeclaration(sourceFile, typeName) {
|
||||
function getNodeText(sourceFile, node) {
|
||||
return sourceFile.getFullText().substring(node.pos, node.end);
|
||||
}
|
||||
function getMassagedTopLevelDeclarationText(sourceFile, declaration) {
|
||||
function hasModifier(modifiers, kind) {
|
||||
if (modifiers) {
|
||||
for (var i = 0; i < modifiers.length; i++) {
|
||||
var mod = modifiers[i];
|
||||
if (mod.kind === kind) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
function isStatic(member) {
|
||||
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
|
||||
}
|
||||
function isDefaultExport(declaration) {
|
||||
return (hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
|
||||
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword));
|
||||
}
|
||||
function getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage) {
|
||||
var result = getNodeText(sourceFile, declaration);
|
||||
// if (result.indexOf('MonacoWorker') >= 0) {
|
||||
// console.log('here!');
|
||||
@@ -142,6 +160,18 @@ function getMassagedTopLevelDeclarationText(sourceFile, declaration) {
|
||||
// }
|
||||
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
|
||||
var interfaceDeclaration = declaration;
|
||||
var staticTypeName_1 = (isDefaultExport(interfaceDeclaration)
|
||||
? importName + ".default"
|
||||
: importName + "." + declaration.name.text);
|
||||
var instanceTypeName_1 = staticTypeName_1;
|
||||
var typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
|
||||
if (typeParametersCnt > 0) {
|
||||
var arr = [];
|
||||
for (var i = 0; i < typeParametersCnt; i++) {
|
||||
arr.push('any');
|
||||
}
|
||||
instanceTypeName_1 = instanceTypeName_1 + "<" + arr.join(',') + ">";
|
||||
}
|
||||
var members = interfaceDeclaration.members;
|
||||
members.forEach(function (member) {
|
||||
try {
|
||||
@@ -151,6 +181,15 @@ function getMassagedTopLevelDeclarationText(sourceFile, declaration) {
|
||||
result = result.replace(memberText, '');
|
||||
// console.log('AFTER: ', result);
|
||||
}
|
||||
else {
|
||||
var memberName = member.name.text;
|
||||
if (isStatic(member)) {
|
||||
usage.push("a = " + staticTypeName_1 + "." + memberName + ";");
|
||||
}
|
||||
else {
|
||||
usage.push("a = (<" + instanceTypeName_1 + ">b)." + memberName + ";");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
// life..
|
||||
@@ -211,6 +250,16 @@ function generateDeclarationFile(out, inputFiles, recipe) {
|
||||
var endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
|
||||
var lines = recipe.split(endl);
|
||||
var result = [];
|
||||
var usageCounter = 0;
|
||||
var usageImports = [];
|
||||
var usage = [];
|
||||
usage.push("var a;");
|
||||
usage.push("var b;");
|
||||
var generateUsageImport = function (moduleId) {
|
||||
var importName = 'm' + (++usageCounter);
|
||||
usageImports.push("import * as " + importName + " from '" + moduleId.replace(/\.d\.ts$/, '') + "';");
|
||||
return importName;
|
||||
};
|
||||
lines.forEach(function (line) {
|
||||
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m1) {
|
||||
@@ -220,6 +269,7 @@ function generateDeclarationFile(out, inputFiles, recipe) {
|
||||
if (!sourceFile_1) {
|
||||
return;
|
||||
}
|
||||
var importName_1 = generateUsageImport(moduleId);
|
||||
var replacer_1 = createReplacer(m1[2]);
|
||||
var typeNames = m1[3].split(/,/);
|
||||
typeNames.forEach(function (typeName) {
|
||||
@@ -232,7 +282,7 @@ function generateDeclarationFile(out, inputFiles, recipe) {
|
||||
logErr('Cannot find type ' + typeName);
|
||||
return;
|
||||
}
|
||||
result.push(replacer_1(getMassagedTopLevelDeclarationText(sourceFile_1, declaration)));
|
||||
result.push(replacer_1(getMassagedTopLevelDeclarationText(sourceFile_1, declaration, importName_1, usage)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -244,6 +294,7 @@ function generateDeclarationFile(out, inputFiles, recipe) {
|
||||
if (!sourceFile_2) {
|
||||
return;
|
||||
}
|
||||
var importName_2 = generateUsageImport(moduleId);
|
||||
var replacer_2 = createReplacer(m2[2]);
|
||||
var typeNames = m2[3].split(/,/);
|
||||
var typesToExcludeMap_1 = {};
|
||||
@@ -271,7 +322,7 @@ function generateDeclarationFile(out, inputFiles, recipe) {
|
||||
}
|
||||
}
|
||||
}
|
||||
result.push(replacer_2(getMassagedTopLevelDeclarationText(sourceFile_2, declaration)));
|
||||
result.push(replacer_2(getMassagedTopLevelDeclarationText(sourceFile_2, declaration, importName_2, usage)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -282,9 +333,12 @@ function generateDeclarationFile(out, inputFiles, recipe) {
|
||||
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
|
||||
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
|
||||
resultTxt = format(resultTxt);
|
||||
return resultTxt;
|
||||
return [
|
||||
resultTxt,
|
||||
usageImports.join('\n') + "\n\n" + usage.join('\n')
|
||||
];
|
||||
}
|
||||
function getFilesToWatch(out) {
|
||||
function getIncludesInRecipe() {
|
||||
var recipe = fs.readFileSync(RECIPE_PATH).toString();
|
||||
var lines = recipe.split(/\r\n|\n|\r/);
|
||||
var result = [];
|
||||
@@ -292,24 +346,27 @@ function getFilesToWatch(out) {
|
||||
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m1) {
|
||||
var moduleId = m1[1];
|
||||
result.push(moduleIdToPath(out, moduleId));
|
||||
result.push(moduleId);
|
||||
return;
|
||||
}
|
||||
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m2) {
|
||||
var moduleId = m2[1];
|
||||
result.push(moduleIdToPath(out, moduleId));
|
||||
result.push(moduleId);
|
||||
return;
|
||||
}
|
||||
});
|
||||
return result;
|
||||
}
|
||||
function getFilesToWatch(out) {
|
||||
return getIncludesInRecipe().map(function (moduleId) { return moduleIdToPath(out, moduleId); });
|
||||
}
|
||||
exports.getFilesToWatch = getFilesToWatch;
|
||||
function run(out, inputFiles) {
|
||||
log('Starting monaco.d.ts generation');
|
||||
SOURCE_FILE_MAP = {};
|
||||
var recipe = fs.readFileSync(RECIPE_PATH).toString();
|
||||
var result = generateDeclarationFile(out, inputFiles, recipe);
|
||||
var _a = generateDeclarationFile(out, inputFiles, recipe), result = _a[0], usageContent = _a[1];
|
||||
var currentContent = fs.readFileSync(DECLARATION_PATH).toString();
|
||||
log('Finished monaco.d.ts generation');
|
||||
var one = currentContent.replace(/\r\n/gm, '\n');
|
||||
@@ -317,6 +374,7 @@ function run(out, inputFiles) {
|
||||
var isTheSame = one === other;
|
||||
return {
|
||||
content: result,
|
||||
usageContent: usageContent,
|
||||
filePath: DECLARATION_PATH,
|
||||
isTheSame: isTheSame
|
||||
};
|
||||
@@ -326,3 +384,78 @@ function complainErrors() {
|
||||
logErr('Not running monaco.d.ts generation due to compile errors');
|
||||
}
|
||||
exports.complainErrors = complainErrors;
|
||||
var TypeScriptLanguageServiceHost = /** @class */ (function () {
|
||||
function TypeScriptLanguageServiceHost(libs, files, compilerOptions) {
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
}
|
||||
// --- language service host ---------------
|
||||
TypeScriptLanguageServiceHost.prototype.getCompilationSettings = function () {
|
||||
return this._compilerOptions;
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptFileNames = function () {
|
||||
return ([]
|
||||
.concat(Object.keys(this._libs))
|
||||
.concat(Object.keys(this._files)));
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptVersion = function (fileName) {
|
||||
return '1';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getProjectVersion = function () {
|
||||
return '1';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptSnapshot = function (fileName) {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
}
|
||||
else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
}
|
||||
else {
|
||||
return ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getScriptKind = function (fileName) {
|
||||
return ts.ScriptKind.TS;
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getCurrentDirectory = function () {
|
||||
return '';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.getDefaultLibFileName = function (options) {
|
||||
return 'defaultLib:es5';
|
||||
};
|
||||
TypeScriptLanguageServiceHost.prototype.isDefaultLibFileName = function (fileName) {
|
||||
return fileName === this.getDefaultLibFileName(this._compilerOptions);
|
||||
};
|
||||
return TypeScriptLanguageServiceHost;
|
||||
}());
|
||||
function execute() {
|
||||
var OUTPUT_FILES = {};
|
||||
var SRC_FILES = {};
|
||||
var SRC_FILE_TO_EXPECTED_NAME = {};
|
||||
getIncludesInRecipe().forEach(function (moduleId) {
|
||||
if (/\.d\.ts$/.test(moduleId)) {
|
||||
var fileName_1 = path.join(SRC, moduleId);
|
||||
OUTPUT_FILES[moduleIdToPath('src', moduleId)] = fs.readFileSync(fileName_1).toString();
|
||||
return;
|
||||
}
|
||||
var fileName = path.join(SRC, moduleId) + '.ts';
|
||||
SRC_FILES[fileName] = fs.readFileSync(fileName).toString();
|
||||
SRC_FILE_TO_EXPECTED_NAME[fileName] = moduleIdToPath('src', moduleId);
|
||||
});
|
||||
var languageService = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, SRC_FILES, {}));
|
||||
var t1 = Date.now();
|
||||
Object.keys(SRC_FILES).forEach(function (fileName) {
|
||||
var t = Date.now();
|
||||
var emitOutput = languageService.getEmitOutput(fileName, true);
|
||||
OUTPUT_FILES[SRC_FILE_TO_EXPECTED_NAME[fileName]] = emitOutput.outputFiles[0].text;
|
||||
// console.log(`Generating .d.ts for ${fileName} took ${Date.now() - t} ms`);
|
||||
});
|
||||
console.log("Generating .d.ts took " + (Date.now() - t1) + " ms");
|
||||
// console.log(result.filePath);
|
||||
// fs.writeFileSync(result.filePath, result.content.replace(/\r\n/gm, '\n'));
|
||||
// fs.writeFileSync(path.join(SRC, 'user.ts'), result.usageContent.replace(/\r\n/gm, '\n'));
|
||||
return run('src', OUTPUT_FILES);
|
||||
}
|
||||
exports.execute = execute;
|
||||
|
||||
@@ -153,8 +153,30 @@ function getNodeText(sourceFile: ts.SourceFile, node: { pos: number; end: number
|
||||
return sourceFile.getFullText().substring(node.pos, node.end);
|
||||
}
|
||||
|
||||
function hasModifier(modifiers: ts.NodeArray<ts.Modifier>, kind: ts.SyntaxKind): boolean {
|
||||
if (modifiers) {
|
||||
for (let i = 0; i < modifiers.length; i++) {
|
||||
let mod = modifiers[i];
|
||||
if (mod.kind === kind) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare): string {
|
||||
function isStatic(member: ts.ClassElement | ts.TypeElement): boolean {
|
||||
return hasModifier(member.modifiers, ts.SyntaxKind.StaticKeyword);
|
||||
}
|
||||
|
||||
function isDefaultExport(declaration: ts.InterfaceDeclaration | ts.ClassDeclaration): boolean {
|
||||
return (
|
||||
hasModifier(declaration.modifiers, ts.SyntaxKind.DefaultKeyword)
|
||||
&& hasModifier(declaration.modifiers, ts.SyntaxKind.ExportKeyword)
|
||||
);
|
||||
}
|
||||
|
||||
function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declaration: TSTopLevelDeclare, importName: string, usage: string[]): string {
|
||||
let result = getNodeText(sourceFile, declaration);
|
||||
// if (result.indexOf('MonacoWorker') >= 0) {
|
||||
// console.log('here!');
|
||||
@@ -163,7 +185,23 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
|
||||
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
|
||||
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
|
||||
|
||||
let members: ts.NodeArray<ts.Node> = interfaceDeclaration.members;
|
||||
const staticTypeName = (
|
||||
isDefaultExport(interfaceDeclaration)
|
||||
? `${importName}.default`
|
||||
: `${importName}.${declaration.name.text}`
|
||||
);
|
||||
|
||||
let instanceTypeName = staticTypeName;
|
||||
const typeParametersCnt = (interfaceDeclaration.typeParameters ? interfaceDeclaration.typeParameters.length : 0);
|
||||
if (typeParametersCnt > 0) {
|
||||
let arr: string[] = [];
|
||||
for (let i = 0; i < typeParametersCnt; i++) {
|
||||
arr.push('any');
|
||||
}
|
||||
instanceTypeName = `${instanceTypeName}<${arr.join(',')}>`;
|
||||
}
|
||||
|
||||
const members: ts.NodeArray<ts.ClassElement | ts.TypeElement> = interfaceDeclaration.members;
|
||||
members.forEach((member) => {
|
||||
try {
|
||||
let memberText = getNodeText(sourceFile, member);
|
||||
@@ -171,6 +209,13 @@ function getMassagedTopLevelDeclarationText(sourceFile: ts.SourceFile, declarati
|
||||
// console.log('BEFORE: ', result);
|
||||
result = result.replace(memberText, '');
|
||||
// console.log('AFTER: ', result);
|
||||
} else {
|
||||
const memberName = (<ts.Identifier | ts.StringLiteral>member.name).text;
|
||||
if (isStatic(member)) {
|
||||
usage.push(`a = ${staticTypeName}.${memberName};`);
|
||||
} else {
|
||||
usage.push(`a = (<${instanceTypeName}>b).${memberName};`);
|
||||
}
|
||||
}
|
||||
} catch (err) {
|
||||
// life..
|
||||
@@ -237,11 +282,24 @@ function createReplacer(data: string): (str: string) => string {
|
||||
};
|
||||
}
|
||||
|
||||
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe: string): string {
|
||||
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe: string): [string, string] {
|
||||
const endl = /\r\n/.test(recipe) ? '\r\n' : '\n';
|
||||
|
||||
let lines = recipe.split(endl);
|
||||
let result = [];
|
||||
let result: string[] = [];
|
||||
|
||||
let usageCounter = 0;
|
||||
let usageImports: string[] = [];
|
||||
let usage: string[] = [];
|
||||
|
||||
usage.push(`var a;`);
|
||||
usage.push(`var b;`);
|
||||
|
||||
const generateUsageImport = (moduleId: string) => {
|
||||
let importName = 'm' + (++usageCounter);
|
||||
usageImports.push(`import * as ${importName} from '${moduleId.replace(/\.d\.ts$/, '')}';`);
|
||||
return importName;
|
||||
};
|
||||
|
||||
lines.forEach(line => {
|
||||
|
||||
@@ -254,6 +312,8 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
|
||||
return;
|
||||
}
|
||||
|
||||
const importName = generateUsageImport(moduleId);
|
||||
|
||||
let replacer = createReplacer(m1[2]);
|
||||
|
||||
let typeNames = m1[3].split(/,/);
|
||||
@@ -267,7 +327,7 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
|
||||
logErr('Cannot find type ' + typeName);
|
||||
return;
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration)));
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -281,6 +341,8 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
|
||||
return;
|
||||
}
|
||||
|
||||
const importName = generateUsageImport(moduleId);
|
||||
|
||||
let replacer = createReplacer(m2[2]);
|
||||
|
||||
let typeNames = m2[3].split(/,/);
|
||||
@@ -309,7 +371,7 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
|
||||
}
|
||||
}
|
||||
}
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration)));
|
||||
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration, importName, usage)));
|
||||
});
|
||||
return;
|
||||
}
|
||||
@@ -324,10 +386,13 @@ function generateDeclarationFile(out: string, inputFiles: { [file: string]: stri
|
||||
|
||||
resultTxt = format(resultTxt);
|
||||
|
||||
return resultTxt;
|
||||
return [
|
||||
resultTxt,
|
||||
`${usageImports.join('\n')}\n\n${usage.join('\n')}`
|
||||
];
|
||||
}
|
||||
|
||||
export function getFilesToWatch(out: string): string[] {
|
||||
function getIncludesInRecipe(): string[] {
|
||||
let recipe = fs.readFileSync(RECIPE_PATH).toString();
|
||||
let lines = recipe.split(/\r\n|\n|\r/);
|
||||
let result = [];
|
||||
@@ -337,14 +402,14 @@ export function getFilesToWatch(out: string): string[] {
|
||||
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m1) {
|
||||
let moduleId = m1[1];
|
||||
result.push(moduleIdToPath(out, moduleId));
|
||||
result.push(moduleId);
|
||||
return;
|
||||
}
|
||||
|
||||
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
|
||||
if (m2) {
|
||||
let moduleId = m2[1];
|
||||
result.push(moduleIdToPath(out, moduleId));
|
||||
result.push(moduleId);
|
||||
return;
|
||||
}
|
||||
});
|
||||
@@ -352,8 +417,13 @@ export function getFilesToWatch(out: string): string[] {
|
||||
return result;
|
||||
}
|
||||
|
||||
export function getFilesToWatch(out: string): string[] {
|
||||
return getIncludesInRecipe().map((moduleId) => moduleIdToPath(out, moduleId));
|
||||
}
|
||||
|
||||
export interface IMonacoDeclarationResult {
|
||||
content: string;
|
||||
usageContent: string;
|
||||
filePath: string;
|
||||
isTheSame: boolean;
|
||||
}
|
||||
@@ -363,7 +433,7 @@ export function run(out: string, inputFiles: { [file: string]: string; }): IMona
|
||||
SOURCE_FILE_MAP = {};
|
||||
|
||||
let recipe = fs.readFileSync(RECIPE_PATH).toString();
|
||||
let result = generateDeclarationFile(out, inputFiles, recipe);
|
||||
let [result, usageContent] = generateDeclarationFile(out, inputFiles, recipe);
|
||||
|
||||
let currentContent = fs.readFileSync(DECLARATION_PATH).toString();
|
||||
log('Finished monaco.d.ts generation');
|
||||
@@ -374,6 +444,7 @@ export function run(out: string, inputFiles: { [file: string]: string; }): IMona
|
||||
|
||||
return {
|
||||
content: result,
|
||||
usageContent: usageContent,
|
||||
filePath: DECLARATION_PATH,
|
||||
isTheSame
|
||||
};
|
||||
@@ -382,3 +453,96 @@ export function run(out: string, inputFiles: { [file: string]: string; }): IMona
|
||||
export function complainErrors() {
|
||||
logErr('Not running monaco.d.ts generation due to compile errors');
|
||||
}
|
||||
|
||||
|
||||
|
||||
interface ILibMap { [libName: string]: string; }
|
||||
interface IFileMap { [fileName: string]: string; }
|
||||
|
||||
class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
|
||||
|
||||
private readonly _libs: ILibMap;
|
||||
private readonly _files: IFileMap;
|
||||
private readonly _compilerOptions: ts.CompilerOptions;
|
||||
|
||||
constructor(libs: ILibMap, files: IFileMap, compilerOptions: ts.CompilerOptions) {
|
||||
this._libs = libs;
|
||||
this._files = files;
|
||||
this._compilerOptions = compilerOptions;
|
||||
}
|
||||
|
||||
// --- language service host ---------------
|
||||
|
||||
getCompilationSettings(): ts.CompilerOptions {
|
||||
return this._compilerOptions;
|
||||
}
|
||||
getScriptFileNames(): string[] {
|
||||
return (
|
||||
[]
|
||||
.concat(Object.keys(this._libs))
|
||||
.concat(Object.keys(this._files))
|
||||
);
|
||||
}
|
||||
getScriptVersion(fileName: string): string {
|
||||
return '1';
|
||||
}
|
||||
getProjectVersion(): string {
|
||||
return '1';
|
||||
}
|
||||
getScriptSnapshot(fileName: string): ts.IScriptSnapshot {
|
||||
if (this._files.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._files[fileName]);
|
||||
} else if (this._libs.hasOwnProperty(fileName)) {
|
||||
return ts.ScriptSnapshot.fromString(this._libs[fileName]);
|
||||
} else {
|
||||
return ts.ScriptSnapshot.fromString('');
|
||||
}
|
||||
}
|
||||
getScriptKind(fileName: string): ts.ScriptKind {
|
||||
return ts.ScriptKind.TS;
|
||||
}
|
||||
getCurrentDirectory(): string {
|
||||
return '';
|
||||
}
|
||||
getDefaultLibFileName(options: ts.CompilerOptions): string {
|
||||
return 'defaultLib:es5';
|
||||
}
|
||||
isDefaultLibFileName(fileName: string): boolean {
|
||||
return fileName === this.getDefaultLibFileName(this._compilerOptions);
|
||||
}
|
||||
}
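// The host intentionally reports a constant script version and falls back to an empty snapshot for unknown
// files: the file set is fixed for a single monaco.d.ts generation run, so no invalidation is needed.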
|
||||
|
||||
export function execute(): IMonacoDeclarationResult {
|
||||
|
||||
const OUTPUT_FILES: { [file: string]: string; } = {};
|
||||
const SRC_FILES: IFileMap = {};
|
||||
const SRC_FILE_TO_EXPECTED_NAME: { [filename: string]: string; } = {};
|
||||
getIncludesInRecipe().forEach((moduleId) => {
|
||||
if (/\.d\.ts$/.test(moduleId)) {
|
||||
let fileName = path.join(SRC, moduleId);
|
||||
OUTPUT_FILES[moduleIdToPath('src', moduleId)] = fs.readFileSync(fileName).toString();
|
||||
return;
|
||||
}
|
||||
|
||||
let fileName = path.join(SRC, moduleId) + '.ts';
|
||||
SRC_FILES[fileName] = fs.readFileSync(fileName).toString();
|
||||
SRC_FILE_TO_EXPECTED_NAME[fileName] = moduleIdToPath('src', moduleId);
|
||||
});
|
||||
|
||||
const languageService = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, SRC_FILES, {}));
|
||||
|
||||
var t1 = Date.now();
|
||||
Object.keys(SRC_FILES).forEach((fileName) => {
|
||||
var t = Date.now();
|
||||
const emitOutput = languageService.getEmitOutput(fileName, true);
|
||||
OUTPUT_FILES[SRC_FILE_TO_EXPECTED_NAME[fileName]] = emitOutput.outputFiles[0].text;
|
||||
// console.log(`Generating .d.ts for ${fileName} took ${Date.now() - t} ms`);
|
||||
});
|
||||
console.log(`Generating .d.ts took ${Date.now() - t1} ms`);
|
||||
|
||||
// console.log(result.filePath);
|
||||
// fs.writeFileSync(result.filePath, result.content.replace(/\r\n/gm, '\n'));
|
||||
// fs.writeFileSync(path.join(SRC, 'user.ts'), result.usageContent.replace(/\r\n/gm, '\n'));
|
||||
|
||||
return run('src', OUTPUT_FILES);
|
||||
}
|
||||
|
||||
@@ -32,6 +32,10 @@ declare namespace monaco {
|
||||
Error = 3,
|
||||
}
|
||||
|
||||
export enum MarkerTag {
|
||||
Unnecessary = 1,
|
||||
}
|
||||
|
||||
export enum MarkerSeverity {
|
||||
Hint = 1,
|
||||
Info = 2,
|
||||
|
||||
82
build/monaco/monaco.usage.recipe
Normal file
@@ -0,0 +1,82 @@
|
||||
|
||||
// This file adds references to various symbols that should not be removed by tree shaking
|
||||
|
||||
import { ServiceIdentifier } from 'vs/platform/instantiation/common/instantiation';
|
||||
import { IContextViewService } from 'vs/platform/contextview/browser/contextView';
import { IHighlight } from 'vs/base/parts/quickopen/browser/quickOpenModel';
import { IWorkspaceContextService } from 'vs/platform/workspace/common/workspace';
import { IEnvironmentService } from 'vs/platform/environment/common/environment';
import { CountBadge } from 'vs/base/browser/ui/countBadge/countBadge';
import { SimpleWorkerClient, create as create1 } from 'vs/base/common/worker/simpleWorker';
import { create as create2 } from 'vs/editor/common/services/editorSimpleWorker';
import { QuickOpenWidget } from 'vs/base/parts/quickopen/browser/quickOpenWidget';
import { SyncDescriptor0, SyncDescriptor1, SyncDescriptor2, SyncDescriptor3, SyncDescriptor4, SyncDescriptor5, SyncDescriptor6, SyncDescriptor7, SyncDescriptor8 } from 'vs/platform/instantiation/common/descriptors';
import { PolyfillPromise } from 'vs/base/common/winjs.polyfill.promise';
import { DiffNavigator } from 'vs/editor/browser/widget/diffNavigator';
import * as editorAPI from 'vs/editor/editor.api';

(function () {
var a: any;
var b: any;
a = (<IContextViewService>b).layout; // IContextViewProvider
a = (<IWorkspaceContextService>b).getWorkspaceFolder; // IWorkspaceFolderProvider
a = (<IWorkspaceContextService>b).getWorkspace; // IWorkspaceFolderProvider
a = (<CountBadge>b).style; // IThemable
a = (<QuickOpenWidget>b).style; // IThemable
a = (<IEnvironmentService>b).userHome; // IUserHomeProvider
a = (<DiffNavigator>b).previous; // IDiffNavigator
a = (<ServiceIdentifier<any>>b).type;
a = (<IHighlight>b).start;
a = (<IHighlight>b).end;
a = (<SimpleWorkerClient<any>>b).getProxyObject; // IWorkerClient
a = create1;
a = create2;

// promise polyfill
a = PolyfillPromise.all;
a = PolyfillPromise.race;
a = PolyfillPromise.resolve;
a = PolyfillPromise.reject;
a = (<PolyfillPromise>b).then;
a = (<PolyfillPromise>b).catch;

// injection madness
a = (<SyncDescriptor0<any>>b).ctor;
a = (<SyncDescriptor0<any>>b).bind;
a = (<SyncDescriptor1<any, any>>b).ctor;
a = (<SyncDescriptor1<any, any>>b).bind;
a = (<SyncDescriptor1<any, any>>b).ctor;
a = (<SyncDescriptor1<any, any>>b).bind;
a = (<SyncDescriptor2<any, any, any>>b).ctor;
a = (<SyncDescriptor2<any, any, any>>b).bind;
a = (<SyncDescriptor3<any, any, any, any>>b).ctor;
a = (<SyncDescriptor3<any, any, any, any>>b).bind;
a = (<SyncDescriptor4<any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor4<any, any, any, any, any>>b).bind;
a = (<SyncDescriptor5<any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor5<any, any, any, any, any, any>>b).bind;
a = (<SyncDescriptor6<any, any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor6<any, any, any, any, any, any, any>>b).bind;
a = (<SyncDescriptor7<any, any, any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor7<any, any, any, any, any, any, any, any>>b).bind;
a = (<SyncDescriptor8<any, any, any, any, any, any, any, any, any>>b).ctor;
a = (<SyncDescriptor8<any, any, any, any, any, any, any, any, any>>b).bind;

// exported API
a = editorAPI.CancellationTokenSource;
a = editorAPI.Emitter;
a = editorAPI.KeyCode;
a = editorAPI.KeyMod;
a = editorAPI.Position;
a = editorAPI.Range;
a = editorAPI.Selection;
a = editorAPI.SelectionDirection;
a = editorAPI.Severity;
a = editorAPI.MarkerSeverity;
a = editorAPI.MarkerTag;
a = editorAPI.Promise;
a = editorAPI.Uri;
a = editorAPI.Token;
a = editorAPI.editor;
a = editorAPI.languages;
})();
@@ -20,7 +20,6 @@ function yarnInstall(location, opts) {
}
}

// {{SQL CARBON EDIT}}
yarnInstall('extensions'); // node modules shared by all extensions

const allExtensionFolders = fs.readdirSync('extensions');

@@ -83,7 +83,7 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'mas
grammar = plist.parse(content);
} else if (ext === '.cson') {
grammar = cson.parse(content);
} else if (ext === '.json') {
} else if (ext === '.json' || ext === '.JSON-tmLanguage') {
grammar = JSON.parse(content);
} else {
return Promise.reject(new Error('Unknown file extension: ' + ext));

@@ -43,7 +43,12 @@ function update(idOrPath) {
let apiToken = process.env.TRANSIFEX_API_TOKEN;
let languageId = localization.transifexId || localization.languageId;
let translationDataFolder = path.join(locExtFolder, 'translations');

if (languageId === "zh-cn") {
languageId = "zh-hans";
}
if (languageId === "zh-tw") {
languageId = "zh-hant";
}
if (fs.existsSync(translationDataFolder) && fs.existsSync(path.join(translationDataFolder, 'main.i18n.json'))) {
console.log('Clearing \'' + translationDataFolder + '\'...');
rimraf.sync(translationDataFolder);

@@ -17,7 +17,7 @@
"fs-extra-promise": "^1.0.1",
"mime": "^1.3.4",
"minimist": "^1.2.0",
"typescript": "2.8.1",
"typescript": "2.9.2",
"vscode": "^1.0.1",
"xml2js": "^0.4.17",
"github-releases": "^0.4.1",

@@ -1,43 +0,0 @@
#!/bin/bash
set -e

# set agent specific npm cache
if [ -n "$AGENT_WORKFOLDER" ]
then
export npm_config_cache="$AGENT_WORKFOLDER/npm-cache"
echo "Using npm cache: $npm_config_cache"

export YARN_CACHE_FOLDER="$AGENT_WORKFOLDER/yarn-cache"
echo "Using yarn cache: $YARN_CACHE_FOLDER"
fi

SUMMARY="Task;Duration"$'\n'
step() {
START=$SECONDS
TASK=$1; shift
echo ""
echo "*****************************************************************************"
echo "Start: $TASK"
echo "*****************************************************************************"
"$@"

# Calculate total duration
TOTAL=$(echo "$SECONDS - $START" | bc)
M=$(echo "$TOTAL / 60" | bc)
S=$(echo "$TOTAL % 60" | bc)
DURATION="$(printf "%02d" $M):$(printf "%02d" $S)"

echo "*****************************************************************************"
echo "End: $TASK, Total: $DURATION"
echo "*****************************************************************************"
SUMMARY="$SUMMARY$TASK;$DURATION"$'\n'
}

done_steps() {
echo ""
echo "Build Summary"
echo "============="
echo "${SUMMARY}" | column -t -s';'
}

trap done_steps EXIT
@@ -1,18 +0,0 @@
#!/bin/bash
set -e

# setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh --no-use
else
source $NVM_DIR/nvm.sh --no-use
fi

# install node
NODE_VERSION=8.9.1
nvm install $NODE_VERSION
nvm use $NODE_VERSION

# install yarn
npm i -g yarn
@@ -2,99 +2,14 @@ phases:
- phase: Windows
queue: Hosted VS2017
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- powershell: |
$ErrorActionPreference = "Stop"
yarn
.\node_modules\.bin\gulp electron
npm run gulp -- hygiene
.\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- powershell: |
$ErrorActionPreference = "Stop"
.\scripts\test.bat --tfs
.\scripts\test-integration.bat
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)\artifacts" --log "$(Build.ArtifactStagingDirectory)\artifacts\smoketest.log"
name: test
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-win32
publishLocation: Container
condition: succeededOrFailed()
- template: win32/continuous-build-win32.yml

- phase: Linux
queue: Hosted Linux Preview
steps:
- script: |
set -e
apt-get update
apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 libgconf-2-4 dbus xvfb libgtk-3-0
cp build/tfs/linux/x64/xvfb.init /etc/init.d/xvfb
chmod +x /etc/init.d/xvfb
update-rc.d xvfb defaults
ln -sf /bin/dbus-daemon /usr/bin/dbus-daemon
service xvfb start
service dbus start
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
yarn
npm run gulp -- electron-x64
- script: |
set -e
npm run gulp -- hygiene
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --tfs
# DISPLAY=:10 ./scripts/test-integration.sh
name: test
- template: linux/continuous-build-linux.yml

- phase: macOS
queue: Hosted macOS Preview
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
set -e
yarn
npm run gulp -- electron-x64
- script: |
set -e
npm run gulp -- hygiene
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
npm run compile
node build/lib/builtInExtensions.js
name: build
- script: |
set -e
./scripts/test.sh --tfs
./scripts/test-integration.sh
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)/artifacts" --log "$(Build.ArtifactStagingDirectory)/artifacts/smoketest.log"
name: test
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-darwin
publishLocation: Container
condition: succeededOrFailed()
- template: darwin/continuous-build-darwin.yml
@@ -1,58 +0,0 @@
#!/bin/sh

. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh

export VSCODE_MIXIN_PASSWORD="$1"
export AZURE_STORAGE_ACCESS_KEY="$2"
export AZURE_STORAGE_ACCESS_KEY_2="$3"
export MOONCAKE_STORAGE_ACCESS_KEY="$4"
export AZURE_DOCUMENTDB_MASTERKEY="$5"
VSO_PAT="$6"

echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc

step "Install dependencies" \
yarn

step "Hygiene" \
npm run gulp -- hygiene

step "Monaco Editor Check" \
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit

step "Mix in repository from vscode-distro" \
npm run gulp -- mixin

step "Install distro dependencies" \
node build/tfs/common/installDistro.js

step "Build minified & upload source maps" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps

# step "Create loader snapshot"
# node build/lib/snapshotLoader.js

step "Run unit tests" \
./scripts/test.sh --build --reporter dot

step "Run integration tests" \
./scripts/test-integration.sh

# function smoketest {
# ARTIFACTS="$AGENT_BUILDDIRECTORY/smoketest-artifacts"
# rm -rf $ARTIFACTS

# [[ "$VSCODE_QUALITY" == "insider" ]] && VSCODE_APPNAME="Visual Studio Code - Insiders" || VSCODE_APPNAME="Visual Studio Code"
# npm run smoketest -- --build "$AGENT_BUILDDIRECTORY/VSCode-darwin/$VSCODE_APPNAME.app" --log $ARTIFACTS
# }

# step "Run smoke test" \
# smoketest

step "Publish release" \
./build/tfs/darwin/release.sh

step "Generate and upload configuration.json" \
npm run gulp -- upload-vscode-configuration
48
build/tfs/darwin/continuous-build-darwin.yml
Normal file
@@ -0,0 +1,48 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
yarn
displayName: Install Dependencies
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn check-monaco-editor-compilation
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- script: |
yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)/artifacts" --log "$(Build.ArtifactStagingDirectory)/artifacts/smoketest.log"
displayName: Run Smoke Tests
continueOnError: true
- task: PublishBuildArtifacts@1
displayName: Publish Smoketest Artifacts
inputs:
PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
ArtifactName: build-artifacts-darwin
publishLocation: Container
condition: eq(variables['System.PullRequest.IsFork'], 'False')
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
106
build/tfs/darwin/enqueue.js
Normal file
@@ -0,0 +1,106 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
||||
return new (P || (P = Promise))(function (resolve, reject) {
|
||||
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
||||
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
||||
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
|
||||
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
||||
});
|
||||
};
|
||||
var __generator = (this && this.__generator) || function (thisArg, body) {
|
||||
var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
|
||||
return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
|
||||
function verb(n) { return function (v) { return step([n, v]); }; }
|
||||
function step(op) {
|
||||
if (f) throw new TypeError("Generator is already executing.");
|
||||
while (_) try {
|
||||
if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
|
||||
if (y = 0, t) op = [op[0] & 2, t.value];
|
||||
switch (op[0]) {
|
||||
case 0: case 1: t = op; break;
|
||||
case 4: _.label++; return { value: op[1], done: false };
|
||||
case 5: _.label++; y = op[1]; op = [0]; continue;
|
||||
case 7: op = _.ops.pop(); _.trys.pop(); continue;
|
||||
default:
|
||||
if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
|
||||
if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
|
||||
if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
|
||||
if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
|
||||
if (t[2]) _.ops.pop();
|
||||
_.trys.pop(); continue;
|
||||
}
|
||||
op = body.call(thisArg, _);
|
||||
} catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
|
||||
if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
|
||||
}
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var child_process_1 = require("child_process");
|
||||
var documentdb_1 = require("documentdb");
|
||||
var azure = require("azure-storage");
|
||||
function queueSigningRequest(quality, commit) {
|
||||
var retryOperations = new azure.ExponentialRetryPolicyFilter();
|
||||
var queueSvc = azure
|
||||
.createQueueService(process.env['AZURE_STORAGE_ACCOUNT_2'], process.env['AZURE_STORAGE_ACCESS_KEY_2'])
|
||||
.withFilter(retryOperations);
|
||||
queueSvc.messageEncoder = new azure.QueueMessageEncoder.TextBase64QueueMessageEncoder();
|
||||
var message = quality + "/" + commit;
|
||||
return new Promise(function (c, e) { return queueSvc.createMessage('sign-darwin', message, function (err) { return err ? e(err) : c(); }); });
|
||||
}
|
||||
function isBuildSigned(quality, commit) {
|
||||
var client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
var collection = 'dbs/builds/colls/' + quality;
|
||||
var updateQuery = {
|
||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
||||
parameters: [{ name: '@id', value: commit }]
|
||||
};
|
||||
return new Promise(function (c, e) {
|
||||
client.queryDocuments(collection, updateQuery).toArray(function (err, results) {
|
||||
if (err) {
|
||||
return e(err);
|
||||
}
|
||||
if (results.length !== 1) {
|
||||
return c(false);
|
||||
}
|
||||
var release = results[0];
|
||||
var assets = release.assets;
|
||||
var isSigned = assets.some(function (a) { return a.platform === 'darwin' && a.type === 'archive'; });
|
||||
c(isSigned);
|
||||
});
|
||||
});
|
||||
}
|
||||
// async function waitForSignedBuild(quality: string, commit: string): Promise<void> {
|
||||
// let retries = 0;
|
||||
// while (retries < 180) {
|
||||
// if (await isBuildSigned(quality, commit)) {
|
||||
// return;
|
||||
// }
|
||||
// await new Promise<void>(c => setTimeout(c, 10000));
|
||||
// retries++;
|
||||
// }
|
||||
// throw new Error('Timed out waiting for signed build');
|
||||
// }
|
||||
function main(quality) {
|
||||
return __awaiter(this, void 0, void 0, function () {
|
||||
var commit;
|
||||
return __generator(this, function (_a) {
|
||||
switch (_a.label) {
|
||||
case 0:
|
||||
commit = child_process_1.execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
|
||||
console.log("Queueing signing request for '" + quality + "/" + commit + "'...");
|
||||
return [4 /*yield*/, queueSigningRequest(quality, commit)];
|
||||
case 1:
|
||||
_a.sent();
|
||||
return [2 /*return*/];
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
main(process.argv[2]).catch(function (err) {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -1,5 +1,4 @@
steps:

- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
@@ -9,22 +8,31 @@ steps:
versionSpec: "1.3.2"

- script: |
set -e
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
npm run gulp -- hygiene
npm run monaco-compile-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin electron
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/tfs/common/installDistro.js
node build/lib/builtInExtensions.js

- script: |
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-darwin-min
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
name: build

- script: |
./scripts/test.sh --build --tfs
set -e
./scripts/test.sh --build --tfs "Unit Tests"
APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
name: test

- script: |
set -e
# archive the unsigned build
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin-unsigned.zip * && popd

@@ -43,9 +51,13 @@ steps:
false \
../VSCode-darwin-unsigned.zip

# publish hockeyapp symbols
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"

# enqueue the unsigned build
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
node build/tfs/common/enqueue.js "$(VSCODE_QUALITY)"
node build/tfs/darwin/enqueue.js "$(VSCODE_QUALITY)"

AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
npm run gulp -- upload-vscode-configuration
@@ -1,22 +0,0 @@
#!/bin/sh

. ./scripts/env.sh
. ./build/tfs/common/common.sh

REPO=`pwd`
ZIP=$REPO/../VSCode-darwin-selfsigned.zip
UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip
BUILD=$REPO/../VSCode-darwin
PACKAGEJSON=`ls $BUILD/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`

rm -rf $UNSIGNEDZIP
(cd $BUILD && \
step "Create unsigned archive" \
zip -r -X -y $UNSIGNEDZIP *)

step "Upload unsigned archive" \
node build/tfs/common/publish.js $VSCODE_QUALITY darwin archive-unsigned VSCode-darwin-$VSCODE_QUALITY-unsigned.zip $VERSION false $UNSIGNEDZIP

step "Sign build" \
node build/tfs/common/enqueue.js $VSCODE_QUALITY
@@ -1,3 +0,0 @@
#!/bin/bash
set -e
./build/tfs/linux/build.sh ia32 "$@"
@@ -1,3 +0,0 @@
#!/bin/bash
set -e
./build/tfs/linux/build.sh x64 "$@"
@@ -1,64 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
. ./build/tfs/common/node.sh
|
||||
. ./scripts/env.sh
|
||||
. ./build/tfs/common/common.sh
|
||||
|
||||
export ARCH="$1"
|
||||
export npm_config_arch="$ARCH"
|
||||
export VSCODE_MIXIN_PASSWORD="$2"
|
||||
export AZURE_STORAGE_ACCESS_KEY="$3"
|
||||
export AZURE_STORAGE_ACCESS_KEY_2="$4"
|
||||
export MOONCAKE_STORAGE_ACCESS_KEY="$5"
|
||||
export AZURE_DOCUMENTDB_MASTERKEY="$6"
|
||||
export LINUX_REPO_PASSWORD="$7"
|
||||
VSO_PAT="$8"
|
||||
|
||||
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
|
||||
|
||||
step "Install dependencies" \
|
||||
yarn
|
||||
|
||||
step "Hygiene" \
|
||||
npm run gulp -- hygiene
|
||||
|
||||
step "Monaco Editor Check" \
|
||||
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
|
||||
|
||||
step "Mix in repository from vscode-distro" \
|
||||
npm run gulp -- mixin
|
||||
|
||||
step "Get Electron" \
|
||||
npm run gulp -- "electron-$ARCH"
|
||||
|
||||
step "Install distro dependencies" \
|
||||
node build/tfs/common/installDistro.js
|
||||
|
||||
step "Build minified" \
|
||||
npm run gulp -- "vscode-linux-$ARCH-min"
|
||||
|
||||
# step "Create loader snapshot"
|
||||
# node build/lib/snapshotLoader.js --arch=$ARCH
|
||||
|
||||
step "Run unit tests" \
|
||||
./scripts/test.sh --build --reporter dot
|
||||
|
||||
# function smoketest {
|
||||
# id -u testuser &>/dev/null || (useradd -m testuser; chpasswd <<< testuser:testpassword)
|
||||
# sudo -i -u testuser -- sh -c 'git config --global user.name "VS Code Agent" && git config --global user.email "monacotools@microsoft.com"'
|
||||
|
||||
# ARTIFACTS="$AGENT_BUILDDIRECTORY/smoketest-artifacts"
|
||||
# rm -rf $ARTIFACTS
|
||||
# mkdir -p $ARTIFACTS
|
||||
# chown -R testuser $ARTIFACTS
|
||||
|
||||
# ps -o pid= -u testuser | xargs sudo kill -9
|
||||
# DISPLAY=:10 sudo -i -u testuser -- sh -c "cd $BUILD_SOURCESDIRECTORY/test/smoke && ./node_modules/.bin/mocha --build $AGENT_BUILDDIRECTORY/VSCode-linux-$ARCH --log $ARTIFACTS"
|
||||
# # DISPLAY=:10 sudo -i -u testuser -- sh -c "cd /vso/work/1/s/test/smoke && ./node_modules/.bin/mocha --build /vso/work/1/VSCode-linux-ia32"
|
||||
# }
|
||||
|
||||
# step "Run smoke test" \
|
||||
# smoketest
|
||||
|
||||
step "Publish release" \
|
||||
./build/tfs/linux/release.sh
|
||||
@@ -1,103 +0,0 @@
|
||||
steps:
|
||||
|
||||
- script: |
|
||||
# dependencies
|
||||
dpkg --add-architecture i386
|
||||
apt-get update
|
||||
|
||||
DEPS=" \
|
||||
gcc-multilib g++-multilib \
|
||||
pkg-config \
|
||||
dbus \
|
||||
xvfb \
|
||||
fakeroot \
|
||||
bc \
|
||||
bsdmainutils \
|
||||
rpm \
|
||||
"
|
||||
|
||||
if [[ "$(VSCODE_ARCH)" == "x64" ]]; then
|
||||
DEPS="$DEPS \
|
||||
dpkg-dev \
|
||||
libgconf-2-4 \
|
||||
libnss3 \
|
||||
libasound2 \
|
||||
libxtst6 \
|
||||
libx11-dev \
|
||||
libxkbfile-dev \
|
||||
libxss1 \
|
||||
libx11-xcb-dev \
|
||||
libsecret-1-dev \
|
||||
"
|
||||
else
|
||||
DEPS="$DEPS \
|
||||
dpkg-dev:i386 \
|
||||
libgconf-2-4:i386 \
|
||||
libnss3:i386 \
|
||||
libasound2:i386 \
|
||||
libxtst6:i386 \
|
||||
libnotify4:i386 \
|
||||
libx11-dev:i386 \
|
||||
libxkbfile-dev:i386 \
|
||||
libxss1:i386 \
|
||||
libx11-xcb-dev:i386 \
|
||||
libgl1-mesa-glx:i386 libgl1-mesa-dri:i386 \
|
||||
libgirepository-1.0-1:i386 \
|
||||
gir1.2-glib-2.0:i386 \
|
||||
gir1.2-secret-1:i386 \
|
||||
libsecret-1-dev:i386 \
|
||||
libgtk2.0-0:i386 \
|
||||
"
|
||||
fi
|
||||
|
||||
apt-get install -y $DEPS
|
||||
|
||||
# setup xvfb
|
||||
cp build/tfs/linux/$(VSCODE_ARCH)/xvfb.init /etc/init.d/xvfb
|
||||
chmod +x /etc/init.d/xvfb
|
||||
update-rc.d xvfb defaults
|
||||
service xvfb start
|
||||
|
||||
# setup dbus
|
||||
ln -sf /bin/dbus-daemon /usr/bin/dbus-daemon
|
||||
service dbus start
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "8.9.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.3.2"
|
||||
|
||||
- script: |
|
||||
export npm_config_arch="$(VSCODE_ARCH)"
|
||||
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
|
||||
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
|
||||
fi
|
||||
|
||||
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
|
||||
yarn
|
||||
npm run gulp -- hygiene
|
||||
npm run monaco-compile-check
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
|
||||
node build/tfs/common/installDistro.js
|
||||
|
||||
- script: |
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
|
||||
name: build
|
||||
|
||||
- script: |
|
||||
npm run gulp -- "electron-$(VSCODE_ARCH)"
|
||||
DISPLAY=:10 ./scripts/test.sh --build --tfs
|
||||
name: test
|
||||
|
||||
- script: |
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
|
||||
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
|
||||
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
|
||||
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"
|
||||
44
build/tfs/linux/continuous-build-linux.yml
Normal file
@@ -0,0 +1,44 @@
steps:
- script: |
set -e
apt-get update
apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 libgconf-2-4 dbus xvfb libgtk-3-0
cp build/tfs/linux/x64/xvfb.init /etc/init.d/xvfb
chmod +x /etc/init.d/xvfb
update-rc.d xvfb defaults
ln -sf /bin/dbus-daemon /usr/bin/dbus-daemon
service xvfb start
service dbus start
- task: NodeTool@0
inputs:
versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.3.2"
- script: |
yarn
displayName: Install Dependencies
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn check-monaco-editor-compilation
displayName: Run Monaco Editor Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()
@@ -39,4 +39,11 @@ function getConfig(quality: string): Promise<Config> {
}

getConfig(process.argv[2])
.then(c => console.log(c.frozen), e => console.error(e));
.then(config => {
console.log(config.frozen);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});
118
build/tfs/linux/product-build-linux.yml
Normal file
@@ -0,0 +1,118 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "8.9.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.3.2"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch="$(VSCODE_ARCH)"
|
||||
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
|
||||
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
|
||||
fi
|
||||
|
||||
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
|
||||
yarn
|
||||
npm run gulp -- hygiene
|
||||
npm run monaco-compile-check
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
|
||||
node build/tfs/common/installDistro.js
|
||||
node build/lib/builtInExtensions.js
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
|
||||
name: build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm run gulp -- "electron-$(VSCODE_ARCH)"
|
||||
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
|
||||
name: test
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
REPO="$(pwd)"
|
||||
ROOT="$REPO/.."
|
||||
ARCH="$(VSCODE_ARCH)"
|
||||
|
||||
# Publish tarball
|
||||
PLATFORM_LINUX="linux-$(VSCODE_ARCH)"
|
||||
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
||||
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
||||
BUILDNAME="VSCode-$PLATFORM_LINUX"
|
||||
BUILD="$ROOT/$BUILDNAME"
|
||||
BUILD_VERSION="$(date +%s)"
|
||||
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
||||
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
||||
PACKAGEJSON="$BUILD/resources/app/package.json"
|
||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||
|
||||
rm -rf $ROOT/code-*.tar.*
|
||||
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
|
||||
node build/tfs/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
|
||||
|
||||
# Publish hockeyapp symbols
|
||||
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"
|
||||
|
||||
# Publish DEB
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
|
||||
PLATFORM_DEB="linux-deb-$ARCH"
|
||||
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
||||
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
||||
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
||||
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
|
||||
node build/tfs/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
|
||||
|
||||
# Publish RPM
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
|
||||
PLATFORM_RPM="linux-rpm-$ARCH"
|
||||
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
||||
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
||||
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
||||
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
|
||||
node build/tfs/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
|
||||
|
||||
# SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)"
|
||||
# SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME"
|
||||
|
||||
# Publish to MS repo
|
||||
IS_FROZEN="$(AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" node build/tfs/linux/frozen-check.js $VSCODE_QUALITY)"
|
||||
|
||||
if [ -z "$VSCODE_QUALITY" ]; then
|
||||
echo "VSCODE_QUALITY is not set, skipping repo package publish"
|
||||
elif [ "$IS_FROZEN" = "true" ]; then
|
||||
echo "$VSCODE_QUALITY is frozen, skipping repo package publish"
|
||||
else
|
||||
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ] || [ "$VSCODE_PUBLISH_LINUX" = "true" ]; then
|
||||
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
|
||||
# Write config files needed by API, use eval to force environment variable expansion
|
||||
pushd build/tfs/linux
|
||||
# Submit to apt repo
|
||||
# if [ "$DEB_ARCH" = "amd64" ]; then
|
||||
# echo "{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"vscode\", \"password\": \"$(LINUX_REPO_PASSWORD)\" }" > apt-config.json
|
||||
# ./repoapi_client.sh -config apt-config.json -addfile $DEB_PATH
|
||||
# fi
|
||||
# Submit to yum repo (disabled as it's manual until signing is automated)
|
||||
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"vscode\", \"password\": \"$(LINUX_REPO_PASSWORD)\" }' > yum-config.json
|
||||
|
||||
# ./repoapi_client.sh -config yum-config.json -addfile $RPM_PATH
|
||||
popd
|
||||
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
@@ -1,81 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
. ./scripts/env.sh
|
||||
. ./build/tfs/common/common.sh
|
||||
|
||||
step "Build Debian package" \
|
||||
npm run gulp -- "vscode-linux-$ARCH-build-deb"
|
||||
|
||||
step "Build RPM package" \
|
||||
npm run gulp -- "vscode-linux-$ARCH-build-rpm"
|
||||
|
||||
# step "Build snap package" \
|
||||
# npm run gulp -- "vscode-linux-$ARCH-build-snap"
|
||||
|
||||
# Variables
|
||||
PLATFORM_LINUX="linux-$ARCH"
|
||||
PLATFORM_DEB="linux-deb-$ARCH"
|
||||
PLATFORM_RPM="linux-rpm-$ARCH"
|
||||
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
||||
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
||||
REPO="`pwd`"
|
||||
ROOT="$REPO/.."
|
||||
BUILDNAME="VSCode-$PLATFORM_LINUX"
|
||||
BUILD="$ROOT/$BUILDNAME"
|
||||
BUILD_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\.deb$//g')"
|
||||
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
||||
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
||||
PACKAGEJSON="$BUILD/resources/app/package.json"
|
||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||
|
||||
rm -rf $ROOT/code-*.tar.*
|
||||
(cd $ROOT && \
|
||||
step "Create tar.gz archive" \
|
||||
tar -czf $TARBALL_PATH $BUILDNAME)
|
||||
|
||||
step "Publish tar.gz archive" \
|
||||
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_LINUX archive-unsigned $TARBALL_FILENAME $VERSION true $TARBALL_PATH
|
||||
|
||||
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
||||
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
||||
|
||||
step "Publish Debian package" \
|
||||
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_DEB package $DEB_FILENAME $VERSION true $DEB_PATH
|
||||
|
||||
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
||||
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
||||
|
||||
step "Publish RPM package" \
|
||||
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_RPM package $RPM_FILENAME $VERSION true $RPM_PATH
|
||||
|
||||
# SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)"
|
||||
# SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME"
|
||||
|
||||
IS_FROZEN="$(node build/tfs/linux/frozen-check.js $VSCODE_QUALITY)"
|
||||
|
||||
if [ -z "$VSCODE_QUALITY" ]; then
|
||||
echo "VSCODE_QUALITY is not set, skipping repo package publish"
|
||||
elif [ "$IS_FROZEN" = "true" ]; then
|
||||
echo "$VSCODE_QUALITY is frozen, skipping repo package publish"
|
||||
else
|
||||
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
|
||||
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
|
||||
# Write config files needed by API, use eval to force environment variable expansion
|
||||
DIRNAME=$(dirname $(readlink -f $0))
|
||||
pushd $DIRNAME
|
||||
# Submit to apt repo
|
||||
if [ "$DEB_ARCH" = "amd64" ]; then
|
||||
eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
|
||||
|
||||
step "Publish to repositories" \
|
||||
./repoapi_client.sh -config apt-config.json -addfile $DEB_PATH
|
||||
fi
|
||||
# Submit to yum repo (disabled as it's manual until signing is automated)
|
||||
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
|
||||
|
||||
# ./repoapi_client.sh -config yum-config.json -addfile $RPM_PATH
|
||||
popd
|
||||
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
@@ -1,67 +0,0 @@
|
||||
#!/bin/bash
|
||||
set -e
|
||||
|
||||
# Arguments
|
||||
ARCH="$1"
|
||||
LINUX_REPO_PASSWORD="$2"
|
||||
|
||||
# Variables
|
||||
PLATFORM_LINUX="linux-$ARCH"
|
||||
PLATFORM_DEB="linux-deb-$ARCH"
|
||||
PLATFORM_RPM="linux-rpm-$ARCH"
|
||||
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
||||
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
||||
REPO="`pwd`"
|
||||
ROOT="$REPO/.."
|
||||
BUILDNAME="VSCode-$PLATFORM_LINUX"
|
||||
BUILD="$ROOT/$BUILDNAME"
|
||||
BUILD_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\.deb$//g')"
|
||||
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
||||
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
||||
PACKAGEJSON="$BUILD/resources/app/package.json"
|
||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||
|
||||
rm -rf $ROOT/code-*.tar.*
|
||||
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||
|
||||
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_LINUX archive-unsigned $TARBALL_FILENAME $VERSION true $TARBALL_PATH
|
||||
|
||||
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
||||
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
||||
|
||||
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_DEB package $DEB_FILENAME $VERSION true $DEB_PATH
|
||||
|
||||
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
||||
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
||||
|
||||
node build/tfs/common/publish.js $VSCODE_QUALITY $PLATFORM_RPM package $RPM_FILENAME $VERSION true $RPM_PATH
|
||||
|
||||
# SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)"
|
||||
# SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME"
|
||||
|
||||
IS_FROZEN="$(node build/tfs/linux/frozen-check.js $VSCODE_QUALITY)"
|
||||
|
||||
if [ -z "$VSCODE_QUALITY" ]; then
|
||||
echo "VSCODE_QUALITY is not set, skipping repo package publish"
|
||||
elif [ "$IS_FROZEN" = "true" ]; then
|
||||
echo "$VSCODE_QUALITY is frozen, skipping repo package publish"
|
||||
else
|
||||
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
|
||||
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
|
||||
# Write config files needed by API, use eval to force environment variable expansion
|
||||
pushd build/tfs/linux
|
||||
# Submit to apt repo
|
||||
if [ "$DEB_ARCH" = "amd64" ]; then
|
||||
eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"vscode\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
|
||||
|
||||
./repoapi_client.sh -config apt-config.json -addfile $DEB_PATH
|
||||
fi
|
||||
# Submit to yum repo (disabled as it's manual until signing is automated)
|
||||
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"vscode\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
|
||||
|
||||
# ./repoapi_client.sh -config yum-config.json -addfile $RPM_PATH
|
||||
popd
|
||||
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
@@ -1,413 +1,38 @@
|
||||
phases:
|
||||
- phase: Windows
|
||||
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
|
||||
queue:
|
||||
name: Hosted VS2017
|
||||
parallel: 2
|
||||
matrix:
|
||||
x64:
|
||||
VSCODE_ARCH: x64
|
||||
ia32:
|
||||
VSCODE_ARCH: ia32
|
||||
|
||||
queue: Hosted VS2017
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "8.9.1"
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.3.2"
|
||||
|
||||
- powershell: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
"machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
|
||||
$env:npm_config_arch="$(VSCODE_ARCH)"
|
||||
$env:CHILD_CONCURRENCY="1"
|
||||
yarn
|
||||
npm run gulp -- hygiene
|
||||
npm run monaco-compile-check
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
|
||||
npm run gulp -- mixin
|
||||
node build/tfs/common/installDistro.js
|
||||
node build/lib/builtInExtensions.js
|
||||
|
||||
- powershell: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
|
||||
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min"
|
||||
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-copy-inno-updater"
|
||||
name: build
|
||||
|
||||
- powershell: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
npm run gulp -- "electron-$(VSCODE_ARCH)"
|
||||
.\scripts\test.bat --build --tfs
|
||||
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
|
||||
name: test
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
ConnectedServiceName: 'ESRP CodeSign'
|
||||
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
|
||||
Pattern: '*.dll,*.exe,*.node'
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"keyCode": "CP-229803",
|
||||
"operationSetCode": "SigntoolSign",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "OpusName",
|
||||
"parameterValue": "VS Code"
|
||||
},
|
||||
{
|
||||
"parameterName": "OpusInfo",
|
||||
"parameterValue": "https://code.visualstudio.com/"
|
||||
},
|
||||
{
|
||||
"parameterName": "PageHash",
|
||||
"parameterValue": "/NPH"
|
||||
},
|
||||
{
|
||||
"parameterName": "TimeStamp",
|
||||
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"keyCode": "CP-230012",
|
||||
"operationSetCode": "SigntoolSign",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "OpusName",
|
||||
"parameterValue": "VS Code"
|
||||
},
|
||||
{
|
||||
"parameterName": "OpusInfo",
|
||||
"parameterValue": "https://code.visualstudio.com/"
|
||||
},
|
||||
{
|
||||
"parameterName": "Append",
|
||||
"parameterValue": "/as"
|
||||
},
|
||||
{
|
||||
"parameterName": "FileDigest",
|
||||
"parameterValue": "/fd \"SHA256\""
|
||||
},
|
||||
{
|
||||
"parameterName": "PageHash",
|
||||
"parameterValue": "/NPH"
|
||||
},
|
||||
{
|
||||
"parameterName": "TimeStamp",
|
||||
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"keyCode": "CP-230012",
|
||||
"operationSetCode": "SigntoolVerify",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "VerifyAll",
|
||||
"parameterValue": "/all"
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 120
|
||||
|
||||
- powershell: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-setup"
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
ConnectedServiceName: 'ESRP CodeSign'
|
||||
FolderPath: '$(agent.builddirectory)'
|
||||
Pattern: VSCodeSetup.exe
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"keyCode": "CP-229803",
|
||||
"operationSetCode": "SigntoolSign",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "OpusName",
|
||||
"parameterValue": "VS Code"
|
||||
},
|
||||
{
|
||||
"parameterName": "OpusInfo",
|
||||
"parameterValue": "https://code.visualstudio.com/"
|
||||
},
|
||||
{
|
||||
"parameterName": "PageHash",
|
||||
"parameterValue": "/NPH"
|
||||
},
|
||||
{
|
||||
"parameterName": "TimeStamp",
|
||||
"parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"keyCode": "CP-230012",
|
||||
"operationSetCode": "SigntoolSign",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "OpusName",
|
||||
"parameterValue": "VS Code"
|
||||
},
|
||||
{
|
||||
"parameterName": "OpusInfo",
|
||||
"parameterValue": "https://code.visualstudio.com/"
|
||||
},
|
||||
{
|
||||
"parameterName": "Append",
|
||||
"parameterValue": "/as"
|
||||
},
|
||||
{
|
||||
"parameterName": "FileDigest",
|
||||
"parameterValue": "/fd \"SHA256\""
|
||||
},
|
||||
{
|
||||
"parameterName": "PageHash",
|
||||
"parameterValue": "/NPH"
|
||||
},
|
||||
{
|
||||
"parameterName": "TimeStamp",
|
||||
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
},
|
||||
{
|
||||
"keyCode": "CP-230012",
|
||||
"operationSetCode": "SigntoolVerify",
|
||||
"parameters": [
|
||||
{
|
||||
"parameterName": "VerifyAll",
|
||||
"parameterValue": "/all"
|
||||
}
|
||||
],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 120
|
||||
|
||||
- powershell: |
|
||||
$ErrorActionPreference = "Stop"
|
||||
$Repo = "$(pwd)"
|
||||
$Root = "$Repo\.."
|
||||
$Exe = "$Repo\.build\win32-$(VSCODE_ARCH)\setup\VSCodeSetup.exe"
|
||||
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
|
||||
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
|
||||
|
||||
# get version
|
||||
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
|
||||
$Version = $PackageJson.version
|
||||
$Quality = "$env:VSCODE_QUALITY"
|
||||
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
|
||||
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
|
||||
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
|
||||
|
||||
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
|
||||
|
||||
node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip
|
||||
node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $Exe
|
||||
|
||||
# publish hockeyapp symbols
|
||||
$hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
|
||||
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId
|
||||
- phase: Windows32
|
||||
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
|
||||
queue: Hosted VS2017
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- phase: Linux
|
||||
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
|
||||
queue: linux-x64
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "8.9.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.3.2"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch="$(VSCODE_ARCH)"
|
||||
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
|
||||
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
|
||||
fi
|
||||
|
||||
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
|
||||
yarn
|
||||
npm run gulp -- hygiene
|
||||
npm run monaco-compile-check
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
|
||||
node build/tfs/common/installDistro.js
|
||||
node build/lib/builtInExtensions.js
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
|
||||
name: build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm run gulp -- "electron-$(VSCODE_ARCH)"
|
||||
DISPLAY=:10 ./scripts/test.sh --build --tfs
|
||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
|
||||
name: test
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
|
||||
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
|
||||
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
|
||||
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"
|
||||
|
||||
# publish hockeyapp symbols
|
||||
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- phase: Linux32
|
||||
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
|
||||
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
|
||||
queue: linux-ia32
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "8.9.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.3.2"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch="$(VSCODE_ARCH)"
|
||||
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
|
||||
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
|
||||
fi
|
||||
|
||||
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
|
||||
yarn
|
||||
npm run gulp -- hygiene
|
||||
npm run monaco-compile-check
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
|
||||
node build/tfs/common/installDistro.js
|
||||
node build/lib/builtInExtensions.js
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
|
||||
name: build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm run gulp -- "electron-$(VSCODE_ARCH)"
|
||||
DISPLAY=:10 ./scripts/test.sh --build --tfs
|
||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
|
||||
name: test
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
|
||||
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
|
||||
#npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-snap"
|
||||
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
|
||||
./build/tfs/linux/release2.sh "$(VSCODE_ARCH)" "$(LINUX_REPO_PASSWORD)"
|
||||
|
||||
# publish hockeyapp symbols
|
||||
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX32)"
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- phase: macOS
|
||||
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
|
||||
queue: Hosted macOS Preview
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "8.9.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.3.2"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
|
||||
yarn
|
||||
npm run gulp -- hygiene
|
||||
npm run monaco-compile-check
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
|
||||
node build/tfs/common/installDistro.js
|
||||
node build/lib/builtInExtensions.js
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
|
||||
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
|
||||
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
|
||||
name: build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test.sh --build --tfs
|
||||
APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
|
||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
|
||||
name: test
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
# archive the unsigned build
|
||||
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin-unsigned.zip * && popd
|
||||
|
||||
# publish the unsigned build
|
||||
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
|
||||
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
|
||||
node build/tfs/common/publish.js \
|
||||
"$(VSCODE_QUALITY)" \
|
||||
darwin \
|
||||
archive-unsigned \
|
||||
"VSCode-darwin-$(VSCODE_QUALITY)-unsigned.zip" \
|
||||
$VERSION \
|
||||
false \
|
||||
../VSCode-darwin-unsigned.zip
|
||||
|
||||
# publish hockeyapp symbols
|
||||
node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"
|
||||
|
||||
# enqueue the unsigned build
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
|
||||
node build/tfs/common/enqueue.js "$(VSCODE_QUALITY)"
|
||||
|
||||
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
|
||||
npm run gulp -- upload-vscode-configuration
|
||||
- template: darwin/product-build-darwin.yml
|
||||
@@ -1,71 +0,0 @@
Param(
  [string]$arch,
  [string]$mixinPassword,
  [string]$vsoPAT
)

. .\build\tfs\win32\node.ps1
. .\scripts\env.ps1
. .\build\tfs\win32\lib.ps1

# Create a _netrc file to download distro dependencies
# In order to get _netrc to work, we need a HOME variable setup
"machine monacotools.visualstudio.com password ${vsoPAT}" | Out-File "$env:HOME\_netrc" -Encoding ASCII

# Set the right architecture
$env:npm_config_arch="$arch"
$env:CHILD_CONCURRENCY="1"

step "Install dependencies" {
  exec { & yarn }
}

step "Hygiene" {
  exec { & npm run gulp -- hygiene }
}

step "Monaco Editor Check" {
  exec { & .\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit }
}

$env:VSCODE_MIXIN_PASSWORD = $mixinPassword
step "Mix in repository from vscode-distro" {
  exec { & npm run gulp -- mixin }
}

step "Get Electron" {
  exec { & npm run gulp -- "electron-$global:arch" }
}

step "Install distro dependencies" {
  exec { & node build\tfs\common\installDistro.js }
}

step "Build minified" {
  exec { & npm run gulp -- "vscode-win32-$global:arch-min" }
}

step "Copy Inno updater" {
  exec { & npm run gulp -- "vscode-win32-$global:arch-copy-inno-updater" }
}

# step "Create loader snapshot" {
#   exec { & node build\lib\snapshotLoader.js --arch=$global:arch }
# }

step "Run unit tests" {
  exec { & .\scripts\test.bat --build --reporter dot }
}

# step "Run integration tests" {
#   exec { & .\scripts\test-integration.bat }
# }

# step "Run smoke test" {
#   $Artifacts = "$env:AGENT_BUILDDIRECTORY\smoketest-artifacts"
#   Remove-Item -Recurse -Force -ErrorAction Ignore $Artifacts

#   exec { & npm run smoketest -- --build "$env:AGENT_BUILDDIRECTORY\VSCode-win32-$global:arch" --log "$Artifacts" }
# }

done
@@ -1,12 +0,0 @@
Param(
  [string]$arch
)

. .\build\tfs\win32\node.ps1
. .\build\tfs\win32\lib.ps1

step "Create archive and setup package" {
  exec { & npm run gulp -- "vscode-win32-$global:arch-archive" "vscode-win32-$global:arch-setup" }
}

done
@@ -1,35 +0,0 @@
Param(
  [string]$arch,
  [string]$storageKey,
  [string]$mooncakeStorageKey,
  [string]$documentDbKey
)

. .\build\tfs\win32\node.ps1
. .\build\tfs\win32\lib.ps1

$Repo = "$(pwd)"
$Root = "$Repo\.."
$Exe = "$Repo\.build\win32-$arch\setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$arch\archive\VSCode-win32-$arch.zip"
$Build = "$Root\VSCode-win32-$arch"

# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:MOONCAKE_STORAGE_ACCESS_KEY = $mooncakeStorageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$assetPlatform = if ($arch -eq "ia32") { "win32" } else { "win32-x64" }

step "Publish archive" {
  exec { & node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$global:arch-$Version.zip" $Version true $Zip }
}

step "Publish setup package" {
  exec { & node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$global:arch-$Version.exe" $Version true $Exe }
}

done
6 build/tfs/win32/ESRPClient/NuGet.config Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
  <packageSources>
    <add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
  </packageSources>
</configuration>

4 build/tfs/win32/ESRPClient/packages.config Normal file
@@ -0,0 +1,4 @@
<?xml version="1.0" encoding="utf-8"?>
<packages>
  <package id="EsrpClient" version="1.0.27" />
</packages>
@@ -1,212 +0,0 @@
steps:

- task: NodeTool@0
  inputs:
    versionSpec: "8.9.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.3.2"

- powershell: |
    "machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
    $env:npm_config_arch="$(VSCODE_ARCH)"
    $env:CHILD_CONCURRENCY="1"
    yarn
    npm run gulp -- hygiene
    npm run monaco-compile-check
    $env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
    npm run gulp -- mixin
    node build/tfs/common/installDistro.js

- powershell: |
    $env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
    npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min"
    npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-copy-inno-updater"
  name: build

- powershell: |
    npm run gulp -- "electron-$(VSCODE_ARCH)"
    .\scripts\test.bat --build --tfs
  name: test

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  inputs:
    ConnectedServiceName: 'ESRP CodeSign'
    FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
    Pattern: '*.dll,*.exe,*.node'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-229803",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "Microsoft"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "http://www.microsoft.com"
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "Microsoft"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "http://www.microsoft.com"
            },
            {
              "parameterName": "Append",
              "parameterValue": "/as"
            },
            {
              "parameterName": "FileDigest",
              "parameterValue": "/fd \"SHA256\""
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolVerify",
          "parameters": [
            {
              "parameterName": "VerifyAll",
              "parameterValue": "/all"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 120

- powershell: |
    npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-setup"

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  inputs:
    ConnectedServiceName: 'ESRP CodeSign'
    FolderPath: '$(agent.builddirectory)'
    Pattern: VSCodeSetup.exe
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-229803",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "Microsoft"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "http://www.microsoft.com"
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "Microsoft"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "http://www.microsoft.com"
            },
            {
              "parameterName": "Append",
              "parameterValue": "/as"
            },
            {
              "parameterName": "FileDigest",
              "parameterValue": "/fd \"SHA256\""
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolVerify",
          "parameters": [
            {
              "parameterName": "VerifyAll",
              "parameterValue": "/all"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 120

- powershell: |
    $Repo = "$(pwd)"
    $Root = "$Repo\.."
    $Exe = "$Repo\.build\win32-$(VSCODE_ARCH)\setup\VSCodeSetup.exe"
    $Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
    $Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"

    # get version
    $PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
    $Version = $PackageJson.version
    $Quality = "$env:VSCODE_QUALITY"
    $env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
    $env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
    $env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"

    $assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }

    node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip
    node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $Exe
@@ -1,86 +0,0 @@
Param(
  [string]$arch,
  [string]$mixinPassword,
  [string]$vsoPAT,
  [string]$storageKey,
  [string]$mooncakeStorageKey,
  [string]$documentDbKey
)

. .\build\tfs\win32\node.ps1
. .\scripts\env.ps1
. .\build\tfs\win32\lib.ps1

# Create a _netrc file to download distro dependencies
# In order to get _netrc to work, we need a HOME variable setup
"machine monacotools.visualstudio.com password ${vsoPAT}" | Out-File "$env:HOME\_netrc" -Encoding ASCII

# Set the right architecture
$env:npm_config_arch="$arch"
$env:CHILD_CONCURRENCY="1"

step "Install dependencies" {
  exec { & yarn }
}

step "Hygiene" {
  exec { & npm run gulp -- hygiene }
}

$env:VSCODE_MIXIN_PASSWORD = $mixinPassword
step "Mix in repository from vscode-distro" {
  exec { & npm run gulp -- mixin }
}

step "Get Electron" {
  exec { & npm run gulp -- "electron-$global:arch" }
}

step "Install distro dependencies" {
  exec { & node build\tfs\common\installDistro.js }
}

step "Build minified" {
  exec { & npm run gulp -- "vscode-win32-$global:arch-min" }
}

step "Run unit tests" {
  exec { & .\scripts\test.bat --build --reporter dot }
}

step "Run smoke test" {
  $Artifacts = "$env:AGENT_BUILDDIRECTORY\smoketest-artifacts"
  Remove-Item -Recurse -Force -ErrorAction Ignore $Artifacts

  exec { & npm run smoketest -- --build "$env:AGENT_BUILDDIRECTORY\VSCode-win32-$global:arch" --log "$Artifacts" }
}

step "Create archive and setup package" {
  exec { & npm run gulp -- "vscode-win32-$global:arch-archive" "vscode-win32-$global:arch-setup" }
}

$Repo = "$(pwd)"
$Root = "$Repo\.."
$Exe = "$Repo\.build\win32-$arch\setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$arch\archive\VSCode-win32-$arch.zip"
$Build = "$Root\VSCode-win32-$arch"

# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:MOONCAKE_STORAGE_ACCESS_KEY = $mooncakeStorageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$assetPlatform = if ($arch -eq "ia32") { "win32" } else { "win32-x64" }

step "Publish UNSIGNED archive" {
  exec { & node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive-unsigned "VSCode-win32-$global:arch-$Version-unsigned.zip" $Version false $Zip }
}

step "Publish UNSIGNED setup package" {
  exec { & node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup-unsigned "VSCodeSetup-$global:arch-$Version-unsigned.exe" $Version false $Exe }
}

done
66 build/tfs/win32/continuous-build-win32.yml Normal file
@@ -0,0 +1,66 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.9.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.3.2"
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn }
  displayName: Install Dependencies
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn gulp electron }
  displayName: Download Electron
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn gulp hygiene }
  displayName: Run Hygiene Checks
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn check-monaco-editor-compilation }
  displayName: Run Monaco Editor Checks
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn compile }
  displayName: Compile Sources
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn download-builtin-extensions }
  displayName: Download Built-in Extensions
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { .\scripts\test.bat --tfs "Unit Tests" }
  displayName: Run Unit Tests
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { .\scripts\test-integration.bat --tfs "Integration Tests" }
  displayName: Run Integration Tests
- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn smoketest --screenshots "$(Build.ArtifactStagingDirectory)\artifacts" --log "$(Build.ArtifactStagingDirectory)\artifacts\smoketest.log" }
  displayName: Run Smoke Tests
  continueOnError: true
- task: PublishBuildArtifacts@1
  displayName: Publish Smoketest Artifacts
  inputs:
    PathtoPublish: '$(Build.ArtifactStagingDirectory)/artifacts'
    ArtifactName: build-artifacts-win32
    publishLocation: Container
  condition: eq(variables['System.PullRequest.IsFork'], 'False')
- task: PublishTestResults@2
  displayName: Publish Tests Results
  inputs:
    testResultsFiles: '*-results.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
  condition: succeededOrFailed()
24 build/tfs/win32/exec.ps1 Normal file
@@ -0,0 +1,24 @@
# Taken from psake https://github.com/psake/psake

<#
.SYNOPSIS
  This is a helper function that runs a scriptblock and checks the PS variable $lastexitcode
  to see if an error occurred. If an error is detected then an exception is thrown.
  This function allows you to run command-line programs without having to
  explicitly check the $lastexitcode variable.

.EXAMPLE
  exec { svn info $repository_trunk } "Error executing SVN. Please verify SVN command-line client is installed"
#>
function Exec
{
  [CmdletBinding()]
  param(
    [Parameter(Position=0,Mandatory=1)][scriptblock]$cmd,
    [Parameter(Position=1,Mandatory=0)][string]$errorMessage = ($msgs.error_bad_command -f $cmd)
  )
  & $cmd
  if ($lastexitcode -ne 0) {
    throw ("Exec: " + $errorMessage)
  }
}
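The pipelines in this diff dot-source this helper and then wrap every external command in exec { ... } so that a non-zero exit code fails the step. A minimal standalone sketch of that pattern (the wrapped commands and error message are illustrative, not taken from the pipelines):

# Dot-source the helper so Exec/exec is available in this session.
. build/tfs/win32/exec.ps1

# Fail the script as soon as any PowerShell error occurs.
$ErrorActionPreference = "Stop"

# Wrap native commands so a non-zero exit code becomes a terminating error.
exec { yarn }
exec { node --version } "node is not on PATH"   # optional custom error message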
14 build/tfs/win32/import-esrp-auth-cert.ps1 Normal file
@@ -0,0 +1,14 @@
Param(
  [string]$AuthCertificateBase64,
  [string]$AuthCertificateKey
)

# Import auth certificate
$AuthCertificateFileName = [System.IO.Path]::GetTempFileName()
$AuthCertificateBytes = [Convert]::FromBase64String($AuthCertificateBase64)
[IO.File]::WriteAllBytes($AuthCertificateFileName, $AuthCertificateBytes)
$AuthCertificate = Import-PfxCertificate -FilePath $AuthCertificateFileName -CertStoreLocation Cert:\LocalMachine\My -Password (ConvertTo-SecureString $AuthCertificateKey -AsPlainText -Force)
rm $AuthCertificateFileName
$ESRPAuthCertificateSubjectName = $AuthCertificate.Subject

Write-Output ("##vso[task.setvariable variable=ESRPAuthCertificateSubjectName;]$ESRPAuthCertificateSubjectName")
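This script expects the PFX to arrive as a base64 string in a secret pipeline variable. One way such a value could be produced locally is sketched below; the file path and password are hypothetical, and the second line simply mirrors the invocation used in product-build-win32.yml further down:

# Encode a PFX file as base64 so it can be stored in a (secret) pipeline variable.
$pfxPath = "C:\certs\esrp-auth.pfx"   # hypothetical path
$base64  = [Convert]::ToBase64String([IO.File]::ReadAllBytes($pfxPath))

# The pipeline later hands the value back to the script, as shown in the yml below.
.\build\tfs\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $base64 -AuthCertificateKey "pfx-password"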
@@ -1,48 +0,0 @@
# stop when there's an error
$ErrorActionPreference = 'Stop'

$env:HOME=$env:USERPROFILE

if (Test-Path env:AGENT_WORKFOLDER) {
  $env:HOME="${env:AGENT_WORKFOLDER}\home"
  $env:npm_config_cache="${env:HOME}\npm-cache"
  $env:YARN_CACHE_FOLDER="${env:HOME}\yarn-cache"
  $env:npm_config_devdir="${env:HOME}\npm-devdir"
  New-Item -Path "$env:HOME" -Type directory -Force | out-null
  New-Item -Path "$env:npm_config_cache" -Type directory -Force | out-null
}

# throw when a process exits with something other than 0
function exec([scriptblock]$cmd, [string]$errorMessage = "Error executing command: " + $cmd) {
  & $cmd
  if ($LastExitCode -ne 0) {
    throw $errorMessage
  }
}

$Summary = @()
function step($Task, $Step) {
  echo ""
  echo "*****************************************************************************"
  echo "Start: $Task"
  echo "*****************************************************************************"
  echo ""

  $Stopwatch = [Diagnostics.Stopwatch]::StartNew()
  Invoke-Command $Step
  $Stopwatch.Stop()
  $Formatted = "{0:g}" -f $Stopwatch.Elapsed

  echo "*****************************************************************************"
  echo "End: $Task, Total: $Formatted"
  echo "*****************************************************************************"

  $global:Summary += @{ "$Task" = $Formatted }
}

function done() {
  echo ""
  echo "Build Summary"
  echo "============="
  $global:Summary | Format-Table @{L="Task";E={$_.Name}}, @{L="Duration";E={$_.Value}}
}
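For reference, the step/done helpers in this now-removed lib.ps1 were consumed by the PowerShell entry points deleted above: each build phase ran inside step, and done printed the collected timing table at the end. A minimal sketch of that pattern (the step names and commands mirror the deleted scripts; the ia32/x64 value is illustrative):

# Sketch of how the removed lib.ps1 helpers were used by the deleted build scripts above.
. .\build\tfs\win32\lib.ps1

step "Install dependencies" {
  exec { & yarn }
}

step "Build minified" {
  exec { & npm run gulp -- "vscode-win32-x64-min" }
}

# Print the per-step duration summary collected by step.
done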
@@ -1,7 +0,0 @@
# install node
$env:Path = $env:NVM_HOME + ";" + $env:NVM_SYMLINK + ";" + $env:Path
$NodeVersion = "8.9.1"
# nvm install $NodeVersion
# nvm use $NodeVersion
# npm install -g yarn
$env:Path = $env:NVM_HOME + "\v" + $NodeVersion + ";" + $env:Path
166 build/tfs/win32/product-build-win32.yml Normal file
@@ -0,0 +1,166 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.9.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.3.2"

- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    "machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
    $env:npm_config_arch="$(VSCODE_ARCH)"
    $env:CHILD_CONCURRENCY="1"
    $env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
    exec { yarn }
    exec { npm run gulp -- hygiene }
    exec { npm run monaco-compile-check }
    exec { npm run gulp -- mixin }
    exec { node build/tfs/common/installDistro.js }
    exec { node build/lib/builtInExtensions.js }

- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
    exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min" }
    exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-copy-inno-updater" }
  name: build

- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { npm run gulp -- "electron-$(VSCODE_ARCH)" }
    exec { .\scripts\test.bat --build --tfs "Unit Tests" }
    # yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
  name: test

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  inputs:
    ConnectedServiceName: 'ESRP CodeSign'
    FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
    Pattern: '*.dll,*.exe,*.node'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-229803",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "VS Code"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "https://code.visualstudio.com/"
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/t \"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS\""
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "VS Code"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "https://code.visualstudio.com/"
            },
            {
              "parameterName": "Append",
              "parameterValue": "/as"
            },
            {
              "parameterName": "FileDigest",
              "parameterValue": "/fd \"SHA256\""
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolVerify",
          "parameters": [
            {
              "parameterName": "VerifyAll",
              "parameterValue": "/all"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 120

- task: NuGetCommand@2
  displayName: Install ESRPClient.exe
  inputs:
    restoreSolution: 'build\tfs\win32\ESRPClient\packages.config'
    feedsToUse: config
    nugetConfigPath: 'build\tfs\win32\ESRPClient\NuGet.config'
    externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
    restoreDirectory: packages

- task: ESRPImportCertTask@1
  displayName: Import ESRP Request Signing Certificate
  inputs:
    ESRP: 'ESRP CodeSign'

- powershell: |
    $ErrorActionPreference = "Stop"
    .\build\tfs\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(ESRP_AUTH_CERTIFICATE) -AuthCertificateKey $(ESRP_AUTH_CERTIFICATE_KEY)
  displayName: Import ESRP Auth Certificate

- powershell: |
    . build/tfs/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-system-setup" "vscode-win32-$(VSCODE_ARCH)-user-setup" }

    $Repo = "$(pwd)"
    $Root = "$Repo\.."
    $SystemExe = "$Repo\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe"
    $UserExe = "$Repo\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe"
    $Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
    $Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"

    # get version
    $PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
    $Version = $PackageJson.version
    $Quality = "$env:VSCODE_QUALITY"
    $env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
    $env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
    $env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"

    $assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }

    exec { node build/tfs/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip }
    exec { node build/tfs/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $SystemExe }
    exec { node build/tfs/common/publish.js $Quality "$global:assetPlatform-user" setup "VSCodeUserSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $UserExe }

    # publish hockeyapp symbols
    $hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
    exec { node build/tfs/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId }
build/tfs/win32/sign.ps1
Normal file
82
build/tfs/win32/sign.ps1
Normal file
@@ -0,0 +1,82 @@
|
||||
function Create-TmpJson($Obj) {
|
||||
$FileName = [System.IO.Path]::GetTempFileName()
|
||||
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
|
||||
return $FileName
|
||||
}
|
||||
|
||||
$Auth = Create-TmpJson @{
|
||||
Version = "1.0.0"
|
||||
AuthenticationType = "AAD_CERT"
|
||||
ClientId = $env:ESRPClientId
|
||||
AuthCert = @{
|
||||
SubjectName = $env:ESRPAuthCertificateSubjectName
|
||||
StoreLocation = "LocalMachine"
|
||||
StoreName = "My"
|
||||
}
|
||||
RequestSigningCert = @{
|
||||
SubjectName = $env:ESRPCertificateSubjectName
|
||||
StoreLocation = "LocalMachine"
|
||||
StoreName = "My"
|
||||
}
|
||||
}
|
||||
|
||||
$Policy = Create-TmpJson @{
|
||||
Version = "1.0.0"
|
||||
}
|
||||
|
||||
$Input = Create-TmpJson @{
|
||||
Version = "1.0.0"
|
||||
SignBatches = @(
|
||||
@{
|
||||
SourceLocationType = "UNC"
|
||||
SignRequestFiles = @(
|
||||
@{
|
||||
SourceLocation = $args[0]
|
||||
}
|
||||
)
|
||||
SigningInfo = @{
|
||||
Operations = @(
|
||||
@{
|
||||
KeyCode = "CP-229803"
|
||||
OperationCode = "SigntoolSign"
|
||||
Parameters = @{
|
||||
OpusName = "VS Code"
|
||||
OpusInfo = "https://code.visualstudio.com/"
|
||||
PageHash = "/NPH"
|
||||
TimeStamp = "/t `"http://ts4096.gtm.microsoft.com/TSS/AuthenticodeTS`""
|
||||
}
|
||||
ToolName = "sign"
|
||||
ToolVersion = "1.0"
|
||||
},
|
||||
@{
|
||||
KeyCode = "CP-230012"
|
||||
OperationCode = "SigntoolSign"
|
||||
Parameters = @{
|
||||
OpusName = "VS Code"
|
||||
OpusInfo = "https://code.visualstudio.com/"
|
||||
Append = "/as"
|
||||
FileDigest = "/fd `"SHA256`""
|
||||
PageHash = "/NPH"
|
||||
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
|
||||
}
|
||||
ToolName = "sign"
|
||||
ToolVersion = "1.0"
|
||||
},
|
||||
@{
|
||||
KeyCode = "CP-230012"
|
||||
OperationCode = "SigntoolVerify"
|
||||
Parameters = @{
|
||||
VerifyAll = "/all"
|
||||
}
|
||||
ToolName = "sign"
|
||||
ToolVersion = "1.0"
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
$Output = [System.IO.Path]::GetTempFileName()
|
||||
# $ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
|
||||
# & "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output
|
||||
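sign.ps1 takes the file to sign as its first positional argument ($args[0]) and writes the auth, policy, and input JSON files that ESRPClient.exe expects; the actual ESRPClient.exe call is still commented out in this revision. A hypothetical invocation, assuming the environment variables the script reads have already been populated (all values below are placeholders):

# Hypothetical invocation of sign.ps1; the ESRPClient.exe call inside it is still commented out.
$env:ESRPClientId                   = "00000000-0000-0000-0000-000000000000"   # placeholder
$env:ESRPAuthCertificateSubjectName = "CN=example-auth-cert"                    # placeholder
$env:ESRPCertificateSubjectName     = "CN=example-request-signing-cert"         # placeholder

.\build\tfs\win32\sign.ps1 "$env:AGENT_BUILDDIRECTORY\VSCode-win32-x64\Code.exe"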
@@ -6,6 +6,7 @@
    "removeComments": false,
    "preserveConstEnums": true,
    "sourceMap": false,
    "resolveJsonModule": true,
    "experimentalDecorators": true,
    // enable JavaScript type checking for the language service
    // use the tsconfig.build.json for compiling, which disables JavaScript

1 build/win32/.gitignore vendored Normal file
@@ -0,0 +1 @@
code-processed.iss
@@ -37,7 +37,7 @@
"version": "1.2.1",
"repositoryUrl": "https://github.com/BurntSushi/byteorder",
"licenseDetail": [
"The Source EULA",
"The MIT License (MIT)",
"",
"Copyright (c) 2015 Andrew Gallant",
"",
@@ -66,7 +66,7 @@
"version": "0.2.0",
"repositoryUrl": "https://github.com/Sgeo/take_mut",
"licenseDetail": [
"The Source EULA",
"The MIT License (MIT)",
"",
"Copyright (c) 2016 Sgeo",
"",
@@ -128,7 +128,7 @@
"version": "0.4.0",
"repositoryUrl": "https://github.com/chronotope/chrono",
"licenseDetail": [
"Rust-chrono is dual-licensed under The Source EULA [1] and",
"Rust-chrono is dual-licensed under The MIT License [1] and",
"Apache 2.0 License [2]. Copyright (c) 2014--2017, Kang Seonghoon and",
"contributors.",
"",
@@ -138,7 +138,7 @@
"[1]: <http://opensource.org/licenses/MIT>, which is reproduced below:",
"",
"~~~~",
"The Source EULA",
"The MIT License (MIT)",
"",
"Copyright (c) 2014, Kang Seonghoon.",
"",
@@ -545,33 +545,6 @@
"],
"isProd": true
},
{
"name": "retep998/winapi-rs",
"version": "0.3.4",
"repositoryUrl": "https://github.com/retep998/winapi-rs",
"licenseDetail": [
"Copyright (c) 2015 The winapi-rs Developers",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE."
],
"isProd": true
},
{
"name": "retep998/winapi-rs",
"version": "0.2.8",
@@ -707,6 +680,33 @@
],
"isProd": true
},
{
"name": "retep998/winapi-rs",
"version": "0.3.4",
"repositoryUrl": "https://github.com/retep998/winapi-rs",
"licenseDetail": [
"Copyright (c) 2015 The winapi-rs Developers",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE."
],
"isProd": true
},
{
"name": "rust-lang-nursery/lazy-static.rs",
"version": "1.0.0",
@@ -12,7 +12,6 @@ AppPublisherURL=https://github.com/Microsoft/sqlopsstudio
AppSupportURL=https://github.com/Microsoft/sqlopsstudio
AppUpdatesURL=https://github.com/Microsoft/sqlopsstudio

DefaultDirName={pf}\{#DirName}
DefaultGroupName={#NameLong}
AllowNoIcons=yes
OutputDir={#OutputDir}
@@ -35,6 +34,13 @@ ShowLanguageDialog=auto
ArchitecturesAllowed={#ArchitecturesAllowed}
ArchitecturesInstallIn64BitMode={#ArchitecturesInstallIn64BitMode}

#if "user" == InstallTarget
DefaultDirName={userpf}\{#DirName}
PrivilegesRequired=lowest
#else
DefaultDirName={pf}\{#DirName}
#endif

[Languages]
Name: "english"; MessagesFile: "compiler:Default.isl,{#RepoDir}\build\win32\i18n\messages.en.isl" {#LocalizedLanguageFile}
Name: "german"; MessagesFile: "compiler:Languages\German.isl,{#RepoDir}\build\win32\i18n\messages.de.isl" {#LocalizedLanguageFile("deu")}
@@ -67,11 +73,12 @@ Name: "addtopath"; Description: "{cm:AddToPath}"; GroupDescription: "{cm:Other}"
Name: "runcode"; Description: "{cm:RunAfter,{#NameShort}}"; GroupDescription: "{cm:Other}"; Check: WizardSilent

[Files]
Source: "*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
Source: "*"; Excludes: "\tools,\tools\*,\resources\app\product.json"; DestDir: "{code:GetDestDir}"; Flags: ignoreversion recursesubdirs createallsubdirs
Source: "{#ProductJsonPath}"; DestDir: "{code:GetDestDir}\resources\app"; Flags: ignoreversion

[Icons]
Name: "{group}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; AppUserModelID: "{#AppUserId}"
Name: "{commondesktop}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; AppUserModelID: "{#AppUserId}"
Name: "{commondesktop}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: desktopicon; AppUserModelID: "{#AppUserId}"
Name: "{userappdata}\Microsoft\Internet Explorer\Quick Launch\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: quicklaunchicon; AppUserModelID: "{#AppUserId}"

[Run]
@@ -79,6 +86,11 @@ Filename: "{app}\{#ExeBasename}.exe"; Description: "{cm:LaunchProgram,{#NameLong
Filename: "{app}\{#ExeBasename}.exe"; Description: "{cm:LaunchProgram,{#NameLong}}"; Flags: nowait postinstall; Check: WizardNotSilent

[Registry]
#if "user" == InstallTarget
#define SoftwareClassesRootKey "HKCU"
#else
#define SoftwareClassesRootKey "HKLM"
#endif
Root: HKCR; Subkey: "{#RegValueName}SourceFile"; ValueType: string; ValueName: ""; ValueData: "{cm:SourceFile,{#NameLong}}"; Flags: uninsdeletekey
Root: HKCR; Subkey: "{#RegValueName}SourceFile\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\code_file.ico"
Root: HKCR; Subkey: "{#RegValueName}SourceFile\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""
@@ -90,6 +102,20 @@ Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; V
Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql"; ValueType: string; ValueName: "AppUserModelID"; ValueData: "{#AppUserId}"; Flags: uninsdeletekey; Tasks: associatewithfiles
Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\code_file.ico"; Tasks: associatewithfiles
Root: HKCU; Subkey: "Software\Classes\{#RegValueName}.sql\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""; Tasks: associatewithfiles
; Environment
#if "user" == InstallTarget
#define EnvironmentRootKey "HKCU"
#define EnvironmentKey "Environment"
#define Uninstall64RootKey "HKCU64"
#define Uninstall32RootKey "HKCU32"
#else
#define EnvironmentRootKey "HKLM"
#define EnvironmentKey "System\CurrentControlSet\Control\Session Manager\Environment"
#define Uninstall64RootKey "HKLM64"
#define Uninstall32RootKey "HKLM32"
#endif

Root: {#EnvironmentRootKey}; Subkey: "{#EnvironmentKey}"; ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}\bin"; Tasks: addtopath; Check: NeedsAddPath(ExpandConstant('{app}\bin'))

[Code]
// Don't allow installing conflicting architectures
@@ -101,15 +127,33 @@ var
begin
  Result := True;

  if IsWin64 then begin
    RegKey := 'SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\' + copy('{#IncompatibleAppId}', 2, 38) + '_is1';
#if "user" == InstallTarget
  #if "ia32" == Arch
    #define IncompatibleArchRootKey "HKLM32"
  #else
    #define IncompatibleArchRootKey "HKLM64"
  #endif

  if not WizardSilent() then begin
    RegKey := 'SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\' + copy('{#IncompatibleTargetAppId}', 2, 38) + '_is1';

    if RegKeyExists({#IncompatibleArchRootKey}, RegKey) then begin
      if MsgBox('{#NameShort} is already installed on this system for all users. We recommend first uninstalling that version before installing this one. Are you sure you want to continue the installation?', mbConfirmation, MB_YESNO) = IDNO then begin
        Result := false;
      end;
    end;
  end;
#endif

  if Result and IsWin64 then begin
    RegKey := 'SOFTWARE\Microsoft\Windows\CurrentVersion\Uninstall\' + copy('{#IncompatibleArchAppId}', 2, 38) + '_is1';

    if '{#Arch}' = 'ia32' then begin
      Result := not RegKeyExists(HKLM64, RegKey);
      Result := not RegKeyExists({#Uninstall64RootKey}, RegKey);
      ThisArch := '32';
      AltArch := '64';
    end else begin
      Result := not RegKeyExists(HKLM32, RegKey);
      Result := not RegKeyExists({#Uninstall32RootKey}, RegKey);
      ThisArch := '64';
      AltArch := '32';
    end;
@@ -218,7 +262,7 @@ function NeedsAddPath(Param: string): boolean;
var
  OrigPath: string;
begin
  if not RegQueryStringValue(HKEY_CURRENT_USER, 'Environment', 'Path', OrigPath)
  if not RegQueryStringValue({#EnvironmentRootKey}, '{#EnvironmentKey}', 'Path', OrigPath)
  then begin
    Result := True;
    exit;
@@ -237,7 +281,7 @@ begin
  if not CurUninstallStep = usUninstall then begin
    exit;
  end;
  if not RegQueryStringValue(HKEY_CURRENT_USER, 'Environment', 'Path', Path)
  if not RegQueryStringValue({#EnvironmentRootKey}, '{#EnvironmentKey}', 'Path', Path)
  then begin
    exit;
  end;
@@ -253,5 +297,9 @@ begin
    end;
  end;
end;
  RegWriteExpandStringValue(HKEY_CURRENT_USER, 'Environment', 'Path', NewPath);
  RegWriteExpandStringValue({#EnvironmentRootKey}, '{#EnvironmentKey}', 'Path', NewPath);
end;

#ifdef Debug
  #expr SaveToFile(AddBackslash(SourcePath) + "code-processed.iss")
#endif
Binary file not shown.
@@ -2057,9 +2057,9 @@ tweetnacl@^0.14.3, tweetnacl@~0.14.0:
  version "0.14.5"
  resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"

typescript@2.8.1:
  version "2.8.1"
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.8.1.tgz#6160e4f8f195d5ba81d4876f9c0cc1fbc0820624"
typescript@2.9.2:
  version "2.9.2"
  resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.9.2.tgz#1cbf61d05d6b96269244eb6a3bce4bd914e0f00c"

unbzip2-stream@^1.0.9:
  version "1.2.5"