SQL Operations Studio Public Preview 1 (0.23) release source code

Karl Burtram
2017-11-09 14:30:27 -08:00
parent b88ecb8d93
commit 3cdac41339
8829 changed files with 759707 additions and 286 deletions

build/gulpfile.editor.js (Normal file, 207 additions)

@@ -0,0 +1,207 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var gulp = require('gulp');
var path = require('path');
var util = require('./lib/util');
var common = require('./lib/optimize');
var es = require('event-stream');
var File = require('vinyl');
var root = path.dirname(__dirname);
var sha1 = util.getVersion(root);
var semver = require('./monaco/package.json').version;
var headerVersion = semver + '(' + sha1 + ')';
// Build
var editorEntryPoints = [
{
name: 'vs/editor/editor.main',
include: [],
exclude: [ 'vs/css', 'vs/nls' ],
prepend: [ 'out-build/vs/css.js', 'out-build/vs/nls.js' ],
},
{
name: 'vs/base/common/worker/simpleWorker',
include: [ 'vs/editor/common/services/editorSimpleWorker' ],
prepend: [ 'vs/loader.js' ],
append: [ 'vs/base/worker/workerMain' ],
dest: 'vs/base/worker/workerMain.js'
}
];
var editorResources = [
'out-build/vs/{base,editor}/**/*.{svg,png}',
'!out-build/vs/base/browser/ui/splitview/**/*',
'!out-build/vs/base/browser/ui/toolbar/**/*',
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
'!out-build/vs/workbench/**',
'!**/test/**'
];
var editorOtherSources = [
];
var BUNDLED_FILE_HEADER = [
'/*!-----------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Version: ' + headerVersion,
' * Released under the Source EULA',
' * https://github.com/Microsoft/vscode/blob/master/LICENSE.txt',
' *-----------------------------------------------------------*/',
''
].join('\n');
function editorLoaderConfig() {
var result = common.loaderConfig();
// never ship octicons in editor
result.paths['vs/base/browser/ui/octiconLabel/octiconLabel'] = 'out-build/vs/base/browser/ui/octiconLabel/octiconLabel.mock';
// force css inlining to use base64 -- see https://github.com/Microsoft/monaco-editor/issues/148
result['vs/css'] = {
inlineResources: 'base64',
inlineResourcesLimit: 3000 // see https://github.com/Microsoft/monaco-editor/issues/336
};
return result;
}
gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'], common.optimizeTask({
entryPoints: editorEntryPoints,
otherSources: editorOtherSources,
resources: editorResources,
loaderConfig: editorLoaderConfig(),
bundleLoader: false,
header: BUNDLED_FILE_HEADER,
bundleInfo: true,
out: 'out-editor'
}));
gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-editor'], function() {
return es.merge(
// other assets
es.merge(
gulp.src('build/monaco/LICENSE'),
gulp.src('build/monaco/ThirdPartyNotices.txt'),
gulp.src('src/vs/monaco.d.ts')
).pipe(gulp.dest('out-monaco-editor-core')),
// package.json
gulp.src('build/monaco/package.json')
.pipe(es.through(function(data) {
var json = JSON.parse(data.contents.toString());
json.private = false;
data.contents = new Buffer(JSON.stringify(json, null, ' '));
this.emit('data', data);
}))
.pipe(gulp.dest('out-monaco-editor-core')),
// README.md
gulp.src('build/monaco/README-npm.md')
.pipe(es.through(function(data) {
this.emit('data', new File({
path: data.path.replace(/README-npm\.md/, 'README.md'),
base: data.base,
contents: data.contents
}));
}))
.pipe(gulp.dest('out-monaco-editor-core')),
// dev folder
es.merge(
gulp.src('out-editor/**/*')
).pipe(gulp.dest('out-monaco-editor-core/dev')),
// min folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function(path) {
// no map files
return !/(\.js\.map$)|(nls\.metadata\.json$)|(bundleInfo\.json$)/.test(path);
})).pipe(es.through(function(data) {
// tweak the sourceMappingURL
if (!/\.js$/.test(data.path)) {
this.emit('data', data);
return;
}
var relativePathToMap = path.relative(path.join(data.relative), path.join('min-maps', data.relative + '.map'));
var strContents = data.contents.toString();
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
data.contents = new Buffer(strContents);
this.emit('data', data);
})).pipe(gulp.dest('out-monaco-editor-core/min')),
// min-maps folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function(path) {
// no map files
return /\.js\.map$/.test(path);
})).pipe(gulp.dest('out-monaco-editor-core/min-maps'))
);
});
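// Report, for each editor entry point, every bundled module and the modules that pulled it in
// (reads out-editor/bundleInfo.json, which 'optimize-editor' emits because bundleInfo is true).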
gulp.task('analyze-editor-distro', function() {
var bundleInfo = require('../out-editor/bundleInfo.json');
var graph = bundleInfo.graph;
var bundles = bundleInfo.bundles;
var inverseGraph = {};
Object.keys(graph).forEach(function(module) {
var dependencies = graph[module];
dependencies.forEach(function(dep) {
inverseGraph[dep] = inverseGraph[dep] || [];
inverseGraph[dep].push(module);
});
});
var detailed = {};
Object.keys(bundles).forEach(function(entryPoint) {
var included = bundles[entryPoint];
var includedMap = {};
included.forEach(function(included) {
includedMap[included] = true;
});
var explanation = [];
included.map(function(included) {
if (included.indexOf('!') >= 0) {
return;
}
var reason = (inverseGraph[included]||[]).filter(function(mod) {
return !!includedMap[mod];
});
explanation.push({
module: included,
reason: reason
});
});
detailed[entryPoint] = explanation;
});
console.log(JSON.stringify(detailed, null, '\t'));
});
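// Stream helper: pass through only the vinyl files whose relative path satisfies testFunc.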
function filterStream(testFunc) {
return es.through(function(data) {
if (!testFunc(data.relative)) {
return;
}
this.emit('data', data);
});
}


@@ -0,0 +1,150 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
// Increase max listeners for event emitters
require('events').EventEmitter.defaultMaxListeners = 100;
const gulp = require('gulp');
const path = require('path');
const tsb = require('gulp-tsb');
const es = require('event-stream');
const filter = require('gulp-filter');
const rimraf = require('rimraf');
const util = require('./lib/util');
const watcher = require('./lib/watch');
const createReporter = require('./lib/reporter').createReporter;
const glob = require('glob');
const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
const compilations = glob.sync('**/tsconfig.json', {
cwd: extensionsPath,
ignore: ['**/out/**', '**/node_modules/**']
});
const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
const languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
const relativeDirname = path.dirname(tsconfigFile);
const tsOptions = require(absolutePath).compilerOptions;
tsOptions.verbose = false;
tsOptions.sourceMap = true;
const name = relativeDirname.replace(/\//g, '-');
// Tasks
const clean = 'clean-extension:' + name;
const compile = 'compile-extension:' + name;
const watch = 'watch-extension:' + name;
// Build Tasks
const cleanBuild = 'clean-extension-build:' + name;
const compileBuild = 'compile-extension-build:' + name;
const watchBuild = 'watch-extension-build:' + name;
const root = path.join('extensions', relativeDirname);
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const out = path.join(root, 'out');
const i18n = path.join(__dirname, '..', 'i18n');
const baseUrl = getBaseUrl(out);
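// Build the TypeScript compile stream for this extension: filter *.ts files, compile with gulp-tsb,
// and, for build pipelines, rewrite localize calls, strip inline sourceMappingURLs, and write
// external source maps that point at baseUrl.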
function createPipeline(build, emitError) {
const reporter = createReporter();
tsOptions.inlineSources = !!build;
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
return function () {
const input = es.through();
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
const output = input
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(compilation())
.pipe(build ? nlsDev.rewriteLocalizeCalls() : es.through())
.pipe(build ? util.stripSourceMappingURL() : es.through())
.pipe(sourcemaps.write('.', {
sourceMappingURL: !build ? null : f => `${baseUrl}/${f.relative}.map`,
addComment: !!build,
includeContent: !!build,
sourceRoot: '../src'
}))
.pipe(tsFilter.restore)
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18n, out) : es.through())
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
gulp.task(clean, cb => rimraf(out, cb));
gulp.task(compile, [clean], () => {
const pipeline = createPipeline(false, true);
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
.pipe(gulp.dest(out));
});
gulp.task(watch, [clean], () => {
const pipeline = createPipeline(false);
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
return watchInput
.pipe(util.incremental(pipeline, input))
.pipe(gulp.dest(out));
});
gulp.task(cleanBuild, cb => rimraf(out, cb));
gulp.task(compileBuild, [clean], () => {
const pipeline = createPipeline(true, true);
const input = gulp.src(src, srcOpts);
return input
.pipe(pipeline())
.pipe(gulp.dest(out));
});
gulp.task(watchBuild, [clean], () => {
const pipeline = createPipeline(true);
const input = gulp.src(src, srcOpts);
const watchInput = watcher(src, srcOpts);
return watchInput
.pipe(util.incremental(() => pipeline(true), input))
.pipe(gulp.dest(out));
});
return {
clean: clean,
compile: compile,
watch: watch,
cleanBuild: cleanBuild,
compileBuild: compileBuild,
watchBuild: watchBuild
};
});
gulp.task('clean-extensions', tasks.map(t => t.clean));
gulp.task('compile-extensions', tasks.map(t => t.compile));
gulp.task('watch-extensions', tasks.map(t => t.watch));
gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild));
gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild));
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));

build/gulpfile.hygiene.js (Normal file, 310 additions)

@@ -0,0 +1,310 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const filter = require('gulp-filter');
const es = require('event-stream');
const gulptslint = require('gulp-tslint');
const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter');
const tslint = require('tslint');
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/
const all = [
'*',
'build/**/*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*'
];
const eolFilter = [
'**',
'!ThirdPartyNotices.txt',
'!LICENSE.txt',
'!extensions/**/out/**',
'!**/node_modules/**',
'!**/fixtures/**',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot}',
'!build/{lib,tslintRules}/**/*.js',
'!build/monaco/**',
'!build/win32/**',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile'
];
const indentationFilter = [
'**',
'!ThirdPartyNotices.txt',
'!**/*.md',
'!**/*.ps1',
'!**/*.template',
'!**/*.yml',
'!**/lib/**',
'!extensions/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!src/vs/*/**/*.d.ts',
'!**/*.d.ts.recipe',
'!test/assert.js',
'!**/package.json',
'!**/npm-shrinkwrap.json',
'!**/octicons/**',
'!**/vs/base/common/marked/raw.marked.js',
'!**/vs/base/common/winjs.base.raw.js',
'!**/vs/base/node/terminateProcess.sh',
'!**/vs/nls.js',
'!**/vs/css.js',
'!**/vs/loader.js',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**'
];
const copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!build/**/*.init',
'!resources/win32/bin/code.js',
'!extensions/markdown/media/tomorrow.css',
'!extensions/html/server/src/modes/typescript/*'
];
const eslintFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/winjs.base.raw.js',
'!src/**/raw.marked.js',
'!**/test/**'
];
const tslintFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/typescript/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/**/*.test.ts'
];
const copyrightHeader = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
' * Licensed under the Source EULA. See License.txt in the project root for license information.',
' *--------------------------------------------------------------------------------------------*/'
].join('\n');
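// Log each tslint failure as file:line:character:message, converting to 1-based positions.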
function reportFailures(failures) {
failures.forEach(failure => {
const name = failure.name || failure.fileName;
const position = failure.startPosition;
const line = position.lineAndCharacter ? position.lineAndCharacter.line : position.line;
const character = position.lineAndCharacter ? position.lineAndCharacter.character : position.character;
console.error(`${name}:${line + 1}:${character + 1}:${failure.failure}`);
});
}
gulp.task('eslint', () => {
return gulp.src(all, { base: '.' })
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'))
.pipe(gulpeslint.failAfterError());
});
gulp.task('tslint', () => {
const options = { summarizeFailureOutput: true };
return gulp.src(all, { base: '.' })
.pipe(filter(tslintFilter))
.pipe(gulptslint({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.report(reportFailures, options));
});
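// Core hygiene check: stream the given files (or all of them) through the indentation, formatting,
// tslint and eslint checks, counting errors; the EOL and copyright checks are disabled in this fork
// (see the {{SQL CARBON EDIT}} markers below).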
const hygiene = exports.hygiene = (some, options) => {
options = options || {};
let errorCount = 0;
const eol = es.through(function (file) {
if (/\r\n?/g.test(file.contents.toString('utf8'))) {
console.error(file.relative + ': Bad EOL found');
errorCount++;
}
this.emit('data', file);
});
const indentation = es.through(function (file) {
file.contents
.toString('utf8')
.split(/\r\n|\r|\n/)
.forEach((line, i) => {
if (/^\s*$/.test(line)) {
// empty or whitespace lines are OK
} else if (/^[\t]*[^\s]/.test(line)) {
// good indent
} else if (/^[\t]* \*/.test(line)) {
// block comment using an extra space
} else {
console.error(file.relative + '(' + (i + 1) + ',1): Bad whitespace indentation');
errorCount++;
}
});
this.emit('data', file);
});
const copyrights = es.through(function (file) {
if (file.contents.toString('utf8').indexOf(copyrightHeader) !== 0) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
}
this.emit('data', file);
});
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: true,
tsfmt: true,
// verbose: true
}).then(result => {
if (result.error) {
console.error(result.message);
errorCount++;
}
cb(null, file);
}, err => {
cb(err);
});
});
const tsl = es.through(function (file) {
const configuration = tslint.Configuration.findConfiguration(null, '.');
const options = { formatter: 'json', rulesDirectory: 'build/lib/tslint' };
const contents = file.contents.toString('utf8');
const linter = new tslint.Linter(options);
linter.lint(file.relative, contents, configuration.results);
const result = linter.getResult();
if (result.failureCount > 0) {
reportFailures(result.failures);
errorCount += result.failureCount;
}
this.emit('data', file);
});
const result = gulp.src(some || all, { base: '.' })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(filter(eolFilter))
// {{SQL CARBON EDIT}}
//.pipe(options.skipEOL ? es.through() : eol)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
// {{SQL CARBON EDIT}}
//.pipe(copyrights);
const typescript = result
.pipe(filter(tslintFilter))
.pipe(formatting)
.pipe(tsl);
const javascript = result
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'));
// {{SQL CARBON EDIT}}
// .pipe(gulpeslint.failAfterError());
return es.merge(typescript, javascript)
.pipe(es.through(null, function () {
// {{SQL CARBON EDIT}}
// if (errorCount > 0) {
// this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
// } else {
// this.emit('end');
// }
this.emit('end');
}));
};
gulp.task('hygiene', () => hygiene());
// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {
const cp = require('child_process');
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
process.exit(1);
});
cp.exec('git config core.autocrlf', (err, out) => {
const skipEOL = out.trim() === 'true';
if (process.argv.length > 2) {
return hygiene(process.argv.slice(2), { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
}
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l);
hygiene(some, { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
});
}

build/gulpfile.mixin.js (Normal file, 69 additions)

@@ -0,0 +1,69 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const json = require('gulp-json-editor');
const buffer = require('gulp-buffer');
const filter = require('gulp-filter');
const es = require('event-stream');
const util = require('./lib/util');
const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip');
const assign = require('object-assign');
const pkg = require('../package.json');
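// Overlay quality-specific sources from the private VSCODE_MIXIN_REPO archive onto the working tree,
// merging its product.json over the local one.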
gulp.task('mixin', function () {
const repo = process.env['VSCODE_MIXIN_REPO'];
if (!repo) {
console.log('Missing VSCODE_MIXIN_REPO, skipping mixin');
return;
}
const quality = process.env['VSCODE_QUALITY'];
if (!quality) {
console.log('Missing VSCODE_QUALITY, skipping mixin');
return;
}
const url = `https://github.com/${repo}/archive/${pkg.distro}.zip`;
const opts = { base: url };
const username = process.env['VSCODE_MIXIN_USERNAME'];
const password = process.env['VSCODE_MIXIN_PASSWORD'];
if (username || password) {
opts.auth = { user: username || '', pass: password || '' };
}
console.log('Mixing in sources from \'' + url + '\':');
let all = remote('', opts)
.pipe(zip.src())
.pipe(filter(function (f) { return !f.isDirectory(); }))
.pipe(util.rebase(1));
if (quality) {
const productJsonFilter = filter('product.json', { restore: true });
const mixin = all
.pipe(filter(['quality/' + quality + '/**']))
.pipe(util.rebase(2))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => assign({}, require('../product.json'), o)))
.pipe(productJsonFilter.restore);
all = es.merge(mixin);
}
return all
.pipe(es.mapSync(function (f) {
console.log(f.relative);
return f;
}))
.pipe(gulp.dest('.'));
});

build/gulpfile.sql.js (Normal file, 97 additions)

@@ -0,0 +1,97 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const util = require('./lib/util');
const tsfmt = require('typescript-formatter');
const es = require('event-stream');
const filter = require('gulp-filter');
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
gulp.task('clean-client', util.rimraf('dataprotocol-node/client/node_modules'));
gulp.task('clean-jsonrpc', util.rimraf('dataprotocol-node/jsonrpc/node_modules'));
gulp.task('clean-server', util.rimraf('dataprotocol-node/server/node_modules'));
gulp.task('clean-types', util.rimraf('dataprotocol-node/types/node_modules'));
// {{SQL CARBON EDIT}}
gulp.task('clean-extensions-modules', util.rimraf('extensions-modules/node_modules'));
gulp.task('clean-protocol', ['clean-extensions-modules', 'clean-mssql-extension', 'clean-credentials-extension', 'clean-client', 'clean-jsonrpc', 'clean-server', 'clean-types']);
// Tasks to clean extensions modules
gulp.task('clean-mssql-ext-mod', util.rimraf('extensions/mssql/node_modules/extensions-modules'));
gulp.task('clean-credentials-ext-mod', util.rimraf('extensions/credentials/node_modules/extensions-modules'));
gulp.task('clean-build-ext-mod', util.rimraf('build/node_modules/extensions-modules'));
gulp.task('clean-ext-mod', ['clean-mssql-ext-mod', 'clean-credentials-ext-mod', 'clean-build-ext-mod', 'clean-extensions-modules']);
gulp.task('fmt', () => formatStagedFiles());
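// Format the given files in place with typescript-formatter, honoring tslint and tsconfig settings.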
const formatFiles = (some) => {
let errorCount = 0; // count of files the formatter failed on
const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
replace: true,
tsfmt: true,
tslint: true,
tsconfig: true
// verbose: true
}).then(result => {
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
if (result.error) {
console.error(result.message);
errorCount++;
}
cb(null, file);
}, err => {
cb(err);
});
});
return gulp.src(some, { base: '.' })
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(formatting);
}
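// Format every modified or staged TypeScript file reported by git diff.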
const formatStagedFiles = () => {
const cp = require('child_process');
cp.exec('git diff --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
}
const some = out
.split(/\r?\n/)
.filter(l => !!l)
.filter(l => l.match(/.*.ts$/i));
formatFiles(some).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
});
}

build/gulpfile.test.js (Normal file, 15 additions)

@@ -0,0 +1,15 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const mocha = require('gulp-mocha');
gulp.task('test', function () {
return gulp.src('test/all.js')
.pipe(mocha({ ui: 'tdd', delay: true }))
.once('end', function () { process.exit(); });
});

build/gulpfile.vscode.js (Normal file, 508 additions)

@@ -0,0 +1,508 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const fs = require('fs');
const path = require('path');
const es = require('event-stream');
const azure = require('gulp-azure-storage');
const electron = require('gulp-atom-electron');
const vfs = require('vinyl-fs');
const rename = require('gulp-rename');
const replace = require('gulp-replace');
const filter = require('gulp-filter');
const buffer = require('gulp-buffer');
const json = require('gulp-json-editor');
const _ = require('underscore');
const util = require('./lib/util');
const ext = require('./lib/extensions');
const buildfile = require('../src/buildfile');
const common = require('./lib/optimize');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const packageJson = require('../package.json');
const product = require('../product.json');
const shrinkwrap = require('../npm-shrinkwrap.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
var del = require('del');
// {{SQL CARBON EDIT}}
const serviceInstaller = require('extensions-modules/lib/languageservice/serviceInstallerUtil');
const glob = require('glob');
const productDependencies = Object.keys(product.dependencies || {});
const dependencies = Object.keys(shrinkwrap.dependencies)
.concat(productDependencies); // additional dependencies from our product configuration
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}}
const nodeModules = [
'electron',
'original-fs',
'rxjs/Observable',
'rxjs/Subject',
'rxjs/Observer',
'ng2-charts/ng2-charts',
'rangy/lib/rangy-textrange']
.concat(dependencies)
.concat(baseModules);
// Build
const builtInExtensions = [
{ name: 'ms-vscode.node-debug', version: '1.16.10' },
{ name: 'ms-vscode.node-debug2', version: '1.16.9' }
];
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests'
];
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.main'),
buildfile.base,
buildfile.workbench,
buildfile.code
]);
const vscodeResources = [
'out-build/main.js',
'out-build/cli.js',
'out-build/bootstrap.js',
'out-build/bootstrap-amd.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,cur,html}',
'out-build/vs/base/node/startupTimers.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/electron-browser/bootstrap/**',
'out-build/vs/workbench/parts/debug/**/*.json',
'out-build/vs/workbench/parts/execution/**/*.scpt',
'out-build/vs/workbench/parts/html/browser/webview-pre.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/parts/tasks/**/*.json',
'out-build/vs/workbench/parts/terminal/electron-browser/terminalProcess.js',
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
'out-build/vs/workbench/services/files/**/*.exe',
'out-build/vs/workbench/services/files/**/*.md',
'out-build/vs/code/electron-browser/sharedProcess.js',
// {{SQL CARBON EDIT}}
'out-build/sql/workbench/electron-browser/splashscreen/*',
'out-build/sql/**/*.{svg,png,cur,html}',
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
'out-build/sql/parts/admin/**/*.html',
'out-build/sql/parts/connection/connectionDialog/media/*.{gif,png,svg}',
'out-build/sql/parts/common/dblist/**/*.html',
'out-build/sql/parts/dashboard/**/*.html',
'out-build/sql/parts/disasterRecovery/**/*.html',
'out-build/sql/parts/common/modal/media/**',
'out-build/sql/parts/grid/load/lib/**',
'out-build/sql/parts/grid/load/loadJquery.js',
'out-build/sql/parts/grid/media/**',
'out-build/sql/parts/grid/views/**/*.html',
'out-build/sql/parts/tasks/**/*.html',
'out-build/sql/parts/taskHistory/viewlet/media/**',
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'!**/test/**'
];
const BUNDLED_FILE_HEADER = [
'/*!--------------------------------------------------------',
' * Copyright (C) Microsoft Corporation. All rights reserved.',
' *--------------------------------------------------------*/'
].join('\n');
var languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
if (process.env.VSCODE_QUALITY !== 'stable') {
languages = languages.concat(['ptb', 'hun', 'trk']); // Add languages requested by the community to non-stable builds
}
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
entryPoints: vscodeEntryPoints,
otherSources: [],
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
header: BUNDLED_FILE_HEADER,
out: 'out-vscode',
languages: languages
}));
gulp.task('optimize-index-js', ['optimize-vscode'], () => {
const fullpath = path.join(process.cwd(), 'out-vscode/vs/workbench/electron-browser/bootstrap/index.js');
const contents = fs.readFileSync(fullpath).toString();
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
fs.writeFileSync(fullpath, newContents);
});
const baseUrl = `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`;
gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
// Package
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
const config = {
version: packageJson.electronVersion,
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2017 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [{
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
// {{SQL CARBON EDIT}}
extensions: ["csv", "json", "showplan", "sql", "xml"],
iconFile: 'resources/darwin/code_file.icns'
}],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinCredits: darwinCreditsTemplate ? new Buffer(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
repo: product.electronRepository || void 0
};
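// Fetch the Electron shell for the given architecture, branded with the product name, into .build/electron.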
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
gulp.task('clean-electron', util.rimraf('.build/electron'));
gulp.task('electron', ['clean-electron'], getElectron(process.arch));
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));
/**
* Compute checksums for some files.
*
* @param {string} out The out folder to read the file from.
* @param {string[]} filenames The paths to compute a checksum for.
* @return {Object} A map of paths to checksums.
*/
function computeChecksums(out, filenames) {
var result = {};
filenames.forEach(function (filename) {
var fullPath = path.join(process.cwd(), out, filename);
result[filename] = computeChecksum(fullPath);
});
return result;
}
/**
* Compute checksum for a file.
*
* @param {string} filename The absolute path to a filename.
* @return {string} The checksum for `filename`.
*/
function computeChecksum(filename) {
var contents = fs.readFileSync(filename);
var hash = crypto
.createHash('md5')
.update(contents)
.digest('base64')
.replace(/=+$/, '');
return hash;
}
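// Assemble the distributable application for the given platform/arch: merge the optimized (or minified)
// sources, local extensions, node dependencies, licenses and product metadata, then run everything
// through gulp-atom-electron into ../sqlops-<platform>-<arch>.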
function packageTask(platform, arch, opts) {
opts = opts || {};
// {{SQL CARBON EDIT}}
const destination = path.join(path.dirname(root), 'sqlops') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
platform = platform || process.platform;
return () => {
const out = opts.minified ? 'out-vscode-min' : 'out-vscode';
const checksums = computeChecksums(out, [
'vs/workbench/workbench.main.js',
'vs/workbench/workbench.main.css',
'vs/workbench/electron-browser/bootstrap/index.html',
'vs/workbench/electron-browser/bootstrap/index.js',
'vs/workbench/electron-browser/bootstrap/preload.js'
]);
const src = gulp.src(out + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
const root = path.resolve(path.join(__dirname, '..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name));
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
const nlsFilter = filter('**/*.nls.json', { restore: true });
return ext.fromLocal(extension.path)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`))
// TODO@Dirk: this filter / buffer is here to make sure the nls.json files are buffered
.pipe(nlsFilter)
.pipe(buffer())
.pipe(nlsDev.createAdditionalLanguageFiles(languages, path.join(__dirname, '..', 'i18n')))
.pipe(nlsFilter.restore);
}));
const localExtensionDependencies = gulp.src('extensions/node_modules/**', { base: '.' });
// {{SQL CARBON EDIT}}
const sources = es.merge(src, localExtensions, localExtensionDependencies)
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**',
'!**/*.js.map',
'!extensions/**/node_modules/**/{test, tests}/**',
'!extensions/**/node_modules/**/test.js']));
let version = packageJson.version;
const quality = product.quality;
if (quality && quality !== 'stable') {
version += '-' + quality;
}
const name = product.nameShort;
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
.pipe(json({ name, version }));
const date = new Date().toISOString();
const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date, checksums }));
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
const watermark = gulp.src(['resources/letterpress.svg', 'resources/letterpress-dark.svg', 'resources/letterpress-hc.svg'], { base: '.' });
// TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
const depsSrc = _.flatten(dependencies
.map(function (d) { return ['node_modules/' + d + '/**',
'!node_modules/' + d + '/**/{test,tests}/**',
'!node_modules/' + d + '/**/test.*',
'!node_modules/' + d + '/**/*.test.*']; }));
const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('v8-profiler', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/**']))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']));
let all = es.merge(
packageJsonStream,
productJsonStream,
license,
watermark,
api,
// {{SQL CARBON EDIT}}
dataApi,
sources,
deps
);
if (platform === 'win32') {
all = es.merge(all, gulp.src('resources/win32/code_file.ico', { base: '.' }));
} else if (platform === 'linux') {
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
} else if (platform === 'darwin') {
const shortcut = gulp.src('resources/darwin/bin/code.sh')
.pipe(rename('bin/code'));
all = es.merge(all, shortcut);
}
let result = all
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions())
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
if (platform === 'win32') {
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' }));
result = es.merge(result, gulp.src('resources/win32/bin/code.cmd', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort))
.pipe(rename(function (f) { f.basename = product.applicationName; })));
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort))
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
} else if (platform === 'linux') {
result = es.merge(result, gulp.src('resources/linux/bin/code.sh', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('bin/' + product.applicationName)));
}
return result.pipe(vfs.dest(destination));
};
}
const buildRoot = path.dirname(root);
// {{SQL CARBON EDIT}}
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'sqlops-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'sqlops-win32-x64')));
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'sqlops-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'sqlops-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'sqlops-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'sqlops-linux-arm')));
gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
// Transifex Localizations
const vscodeLanguages = [
'zh-hans',
'zh-hant',
'ja',
'ko',
'de',
'fr',
'es',
'ru',
'it',
'pt-br',
'hu',
'tr'
];
const setupDefaultLanguages = [
'zh-hans',
'zh-hant',
'ko'
];
const apiHostname = process.env.TRANSIFEX_API_URL;
const apiName = process.env.TRANSIFEX_API_NAME;
const apiToken = process.env.TRANSIFEX_API_TOKEN;
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/**/*.nls.json';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
return es.merge(
gulp.src(pathToMetadata).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToSetup).pipe(i18n.prepareXlfFiles()),
gulp.src(pathToExtensions).pipe(i18n.prepareXlfFiles('vscode-extensions'))
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
});
gulp.task('vscode-translations-pull', function () {
return es.merge(
i18n.pullXlfFiles('vscode-editor', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-workbench', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-extensions', apiHostname, apiName, apiToken, vscodeLanguages),
i18n.pullXlfFiles('vscode-setup', apiHostname, apiName, apiToken, setupDefaultLanguages)
).pipe(vfs.dest('../vscode-localization'));
});
gulp.task('vscode-translations-import', function () {
return gulp.src('../vscode-localization/**/*.xlf').pipe(i18n.prepareJsonFiles()).pipe(vfs.dest('./i18n'));
});
// Sourcemaps
gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
const extensions = gulp.src('extensions/**/out/**/*.map', { base: '.' });
return es.merge(vs, extensions)
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
});
// {{SQL CARBON EDIT}}
// Install service locally before building carbon
function installService(extObj) {
var installer = new serviceInstaller.ServiceInstaller(extObj, true);
installer.getServiceInstallDirectoryRoot().then(serviceInstallFolder => {
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
installer.installService();
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
}, getFolderPathError => {
console.log('failed to call getServiceInstallDirectoryRoot error: ' + getFolderPathError);
});
}
gulp.task('install-sqltoolsservice', () => {
var mssqlExt = require('../extensions/mssql/client/out/models/constants');
var extObj = new mssqlExt.Constants();
return installService(extObj);
});


@@ -0,0 +1,289 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const replace = require('gulp-replace');
const rename = require('gulp-rename');
const shell = require('gulp-shell');
const es = require('event-stream');
const vfs = require('vinyl-fs');
const util = require('./lib/util');
const packageJson = require('../package.json');
const product = require('../product.json');
const rpmDependencies = require('../resources/linux/rpm/dependencies');
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
const flatpakManifest = {
appId: product.darwinBundleIdentifier, // We need a reverse-url style identifier.
sdk: 'org.freedesktop.Sdk',
runtime: 'org.freedesktop.Sdk',
runtimeVersion: '1.4',
base: 'io.atom.electron.BaseApp',
baseFlatpakref: 'https://s3-us-west-2.amazonaws.com/electron-flatpak.endlessm.com/electron-base-app-master.flatpakref',
command: product.applicationName,
symlinks: [
['/share/' + product.applicationName + '/bin/' + product.applicationName, '/bin/' + product.applicationName],
],
finishArgs: [
'--share=ipc', '--socket=x11', // Allow showing X11 windows.
'--share=network', // Network access (e.g. for installing extension).
'--filesystem=host', // Allow access to the whole file system.
'--device=dri', // Allow OpenGL rendering.
'--filesystem=/tmp', // Needed for Chromium's single instance check.
'--socket=pulseaudio', // Some extensions may want to play sounds...
'--talk-name=org.freedesktop.Notifications', // ...or pop up notifications.
],
};
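// Map Node arch names to Debian architecture names.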
function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
}
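// Lay out the Debian package tree for the given arch: desktop entry, appdata, icon, the packaged
// binaries, and the DEBIAN control/postinst/prerm/postrm maintainer scripts.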
function prepareDebPackage(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../sqlops-linux-' + arch;
const debArch = getDebPackageArch(arch);
const destination = '.build/linux/deb/' + debArch + '/' + product.applicationName + '-' + debArch;
return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
let size = 0;
const control = code.pipe(es.through(
function (f) { size += f.isDirectory() ? 4096 : f.contents.length; },
function () {
const that = this;
gulp.src('resources/linux/debian/control.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@VERSION@@', packageJson.version + '-' + linuxPackageRevision))
.pipe(replace('@@ARCHITECTURE@@', debArch))
.pipe(replace('@@INSTALLEDSIZE@@', Math.ceil(size / 1024)))
.pipe(rename('DEBIAN/control'))
.pipe(es.through(function (f) { that.emit('data', f); }, function () { that.emit('end'); }));
}));
const prerm = gulp.src('resources/linux/debian/prerm.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('DEBIAN/prerm'));
const postrm = gulp.src('resources/linux/debian/postrm.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('DEBIAN/postrm'));
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ARCHITECTURE@@', debArch))
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktop, appdata, icon, code);
return all.pipe(vfs.dest(destination));
};
}
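// Build the .deb with fakeroot dpkg-deb from the prepared tree and generate a Packages index.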
function buildDebPackage(arch) {
const debArch = getDebPackageArch(arch);
return shell.task([
'chmod 755 ' + product.applicationName + '-' + debArch + '/DEBIAN/postinst ' + product.applicationName + '-' + debArch + '/DEBIAN/prerm ' + product.applicationName + '-' + debArch + '/DEBIAN/postrm',
'mkdir -p deb',
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb',
'dpkg-scanpackages deb /dev/null > Packages'
], { cwd: '.build/linux/deb/' + debArch });
}
function getRpmBuildPath(rpmArch) {
return '.build/linux/rpm/' + rpmArch + '/rpmbuild';
}
function getRpmPackageArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf' }[arch];
}
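// Lay out the rpmbuild tree (BUILD, SPECS, SOURCES) for the given arch from the packaged binaries
// and the Linux resources.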
function prepareRpmPackage(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../sqlops-linux-' + arch;
const rpmArch = getRpmPackageArch(arch);
return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
const spec = gulp.src('resources/linux/rpm/code.spec.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@VERSION@@', packageJson.version))
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(replace('@@DEPENDENCIES@@', rpmDependencies[rpmArch].join(', ')))
.pipe(rename('SPECS/' + product.applicationName + '.spec'));
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
const all = es.merge(code, desktop, appdata, icon, spec, specIcon);
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
};
}
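// Run rpmbuild against the generated spec and copy the resulting .rpm into the destination folder.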
function buildRpmPackage(arch) {
const rpmArch = getRpmPackageArch(arch);
const rpmBuildPath = getRpmBuildPath(rpmArch);
const rpmOut = rpmBuildPath + '/RPMS/' + rpmArch;
const destination = '.build/linux/rpm/' + rpmArch;
return shell.task([
'mkdir -p ' + destination,
'HOME="$(pwd)/' + destination + '" fakeroot rpmbuild -bb ' + rpmBuildPath + '/SPECS/' + product.applicationName + '.spec --target=' + rpmArch,
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
]);
}
function getFlatpakArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'arm' }[arch];
}
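// Lay out the Flatpak file tree for the given arch, including icons resized to every hicolor size.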
function prepareFlatpak(arch) {
// {{SQL CARBON EDIT}}
const binaryDir = '../sqlops-linux-' + arch;
const flatpakArch = getFlatpakArch(arch);
const destination = '.build/linux/flatpak/' + flatpakArch;
return function () {
// This is not imported in the global scope to avoid requiring ImageMagick
// (or GraphicsMagick) when not building Flatpak bundles.
const imgResize = require('gulp-image-resize');
const all = [16, 24, 32, 48, 64, 128, 192, 256, 512].map(function (size) {
return gulp.src('resources/linux/code.png', { base: '.' })
.pipe(imgResize({ width: size, height: size, format: "png", noProfile: true }))
.pipe(rename('share/icons/hicolor/' + size + 'x' + size + '/apps/' + flatpakManifest.appId + '.png'));
});
all.push(gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(replace('Exec=/usr/share/@@NAME@@/@@NAME@@', 'Exec=' + product.applicationName))
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename('share/applications/' + flatpakManifest.appId + '.desktop')));
all.push(gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME@@', flatpakManifest.appId))
.pipe(replace('@@LICENSE@@', product.licenseName))
.pipe(rename('share/appdata/' + flatpakManifest.appId + '.appdata.xml')));
all.push(gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) {
p.dirname = 'share/' + product.applicationName + '/' + p.dirname;
})));
return es.merge(all).pipe(vfs.dest(destination));
};
}
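// Bundle the prepared tree with flatpak-bundler, either into an OSTree repo (FLATPAK_REPO)
// or a single-file .flatpak bundle, optionally GPG-signed.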
function buildFlatpak(arch) {
const flatpakArch = getFlatpakArch(arch);
const manifest = {};
for (var k in flatpakManifest) {
manifest[k] = flatpakManifest[k];
}
manifest.files = [
['.build/linux/flatpak/' + flatpakArch, '/'],
];
const buildOptions = {
arch: flatpakArch,
subject: product.nameLong + ' ' + packageJson.version + '.' + linuxPackageRevision,
};
// If requested, use the configured path for the OSTree repository.
if (process.env.FLATPAK_REPO) {
buildOptions.repoDir = process.env.FLATPAK_REPO;
} else {
buildOptions.bundlePath = manifest.appId + '-' + flatpakArch + '.flatpak';
}
// Setup PGP signing if requested.
if (process.env.GPG_KEY_ID !== undefined) {
buildOptions.gpgSign = process.env.GPG_KEY_ID;
if (process.env.GPG_HOMEDIR) {
buildOptions.gpgHomedir = process.env.GPG_HOMEDIR;
}
}
return function (cb) {
require('flatpak-bundler').bundle(manifest, buildOptions, cb);
};
}
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64'));
gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf'));
gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i386'));
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));
gulp.task('vscode-linux-ia32-prepare-flatpak', ['clean-vscode-linux-ia32-flatpak'], prepareFlatpak('ia32'));
gulp.task('vscode-linux-x64-prepare-flatpak', ['clean-vscode-linux-x64-flatpak'], prepareFlatpak('x64'));
gulp.task('vscode-linux-arm-prepare-flatpak', ['clean-vscode-linux-arm-flatpak'], prepareFlatpak('arm'));
gulp.task('vscode-linux-ia32-flatpak', ['vscode-linux-ia32-prepare-flatpak'], buildFlatpak('ia32'));
gulp.task('vscode-linux-x64-flatpak', ['vscode-linux-x64-prepare-flatpak'], buildFlatpak('x64'));
gulp.task('vscode-linux-arm-flatpak', ['vscode-linux-arm-prepare-flatpak'], buildFlatpak('arm'));
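
The deb, rpm, and flatpak pipelines above all follow the same prepare-then-build shape, so they compose cleanly. A minimal sketch of fanning one architecture out to all three formats (the aggregate task name is hypothetical, not part of this gulpfile):

gulp.task('vscode-linux-x64-build-all', [
    'vscode-linux-x64-build-deb',
    'vscode-linux-x64-build-rpm',
    'vscode-linux-x64-flatpak'
]);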

View File

@@ -0,0 +1,93 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const gulp = require('gulp');
const path = require('path');
const assert = require('assert');
const cp = require('child_process');
const _7z = require('7zip')['7z'];
const util = require('./lib/util');
const pkg = require('../package.json');
const product = require('../product.json');
const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
const buildPath = arch => path.join(path.dirname(repoPath), `sqlops-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'setup');
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
function packageInnoSetup(iss, options, cb) {
options = options || {};
const definitions = options.definitions || {};
const keys = Object.keys(definitions);
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
const defs = keys.map(key => `/d${key}=${definitions[key]}`);
const args = [iss].concat(defs);
cp.spawn(innoSetupPath, args, { stdio: 'inherit' })
.on('error', cb)
.on('exit', () => cb(null));
}
function buildWin32Setup(arch) {
return cb => {
const ia32AppId = product.win32AppId;
const x64AppId = product.win32x64AppId;
const definitions = {
NameLong: product.nameLong,
NameShort: product.nameShort,
DirName: product.win32DirName,
Version: pkg.version,
RawVersion: pkg.version.replace(/-\w+$/, ''),
NameVersion: product.win32NameVersion,
ExeBasename: product.nameShort,
RegValueName: product.win32RegValueName,
ShellNameShort: product.win32ShellNameShort,
AppMutex: product.win32MutexName,
Arch: arch,
AppId: arch === 'ia32' ? ia32AppId : x64AppId,
IncompatibleAppId: arch === 'ia32' ? x64AppId : ia32AppId,
AppUserId: product.win32AppUserModelId,
ArchitecturesAllowed: arch === 'ia32' ? '' : 'x64',
ArchitecturesInstallIn64BitMode: arch === 'ia32' ? '' : 'x64',
SourceDir: buildPath(arch),
RepoDir: repoPath,
OutputDir: setupDir(arch)
};
packageInnoSetup(issPath, { definitions }, cb);
};
}
gulp.task('clean-vscode-win32-ia32-setup', util.rimraf(setupDir('ia32')));
gulp.task('vscode-win32-ia32-setup', ['clean-vscode-win32-ia32-setup'], buildWin32Setup('ia32'));
gulp.task('clean-vscode-win32-x64-setup', util.rimraf(setupDir('x64')));
gulp.task('vscode-win32-x64-setup', ['clean-vscode-win32-x64-setup'], buildWin32Setup('x64'));
function archiveWin32Setup(arch) {
return cb => {
const args = ['a', '-tzip', zipPath(arch), '.', '-r'];
cp.spawn(_7z, args, { stdio: 'inherit', cwd: buildPath(arch) })
.on('error', cb)
.on('exit', () => cb(null));
};
}
gulp.task('clean-vscode-win32-ia32-archive', util.rimraf(zipDir('ia32')));
gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], archiveWin32Setup('ia32'));
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));
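
packageInnoSetup only shells out to ISCC with /d defines, so it can be exercised outside buildWin32Setup. A rough sketch, assuming an .iss script whose preprocessor reads only the names shown here (all values are placeholders):

packageInnoSetup(issPath, {
    definitions: {
        NameLong: 'Example App',
        NameShort: 'example',
        Version: '0.0.1',
        Arch: 'x64'
    }
}, err => {
    if (err) { console.error('Inno Setup packaging failed', err); }
});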

456
build/lib/bundle.js Normal file
View File

@@ -0,0 +1,456 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var fs = require("fs");
var path = require("path");
var vm = require("vm");
/**
* Bundle `entryPoints` given config `config`.
*/
function bundle(entryPoints, config, callback) {
var entryPointsMap = {};
entryPoints.forEach(function (module) {
entryPointsMap[module.name] = module;
});
var allMentionedModulesMap = {};
entryPoints.forEach(function (module) {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
});
(module.exclude || []).forEach(function (excludedModule) {
allMentionedModulesMap[excludedModule] = true;
});
});
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
var loader = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);
loader(['require'], function (localRequire) {
var resolvePath = function (path) {
var r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (var moduleId in entryPointsMap) {
var entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
if (entryPoint.prepend) {
entryPoint.prepend = entryPoint.prepend.map(resolvePath);
}
}
});
loader(Object.keys(allMentionedModulesMap), function () {
var modules = loader.getBuildInfo();
var partialResult = emitEntryPoints(modules, entryPointsMap);
var cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, function (err) { return callback(err, null); });
}
exports.bundle = bundle;
function emitEntryPoints(modules, entryPoints) {
var modulesMap = {};
modules.forEach(function (m) {
modulesMap[m.id] = m;
});
var modulesGraph = {};
modules.forEach(function (m) {
modulesGraph[m.id] = m.dependencies;
});
var sortedModules = topologicalSort(modulesGraph);
var result = [];
var usedPlugins = {};
var bundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach(function (moduleToBundle) {
var info = entryPoints[moduleToBundle];
var rootNodes = [moduleToBundle].concat(info.include || []);
var allDependencies = visit(rootNodes, modulesGraph);
var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach(function (excludeRoot) {
var allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach(function (exclude) {
delete allDependencies[exclude];
});
});
var includedModules = sortedModules.filter(function (module) {
return allDependencies[module];
});
bundleData.bundles[moduleToBundle] = includedModules;
var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
result = result.concat(res.files);
for (var pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
var write = function (filename, contents) {
result.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.finishBuild(write);
}
});
return {
// TODO@TS 2.1.2
files: extractStrings(removeDuplicateTSBoilerplate(result)),
bundleData: bundleData
};
}
function extractStrings(destFiles) {
var parseDefineCall = function (moduleMatch, depsMatch) {
var module = moduleMatch.replace(/^"|"$/g, '');
var deps = depsMatch.split(',');
deps = deps.map(function (dep) {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
var prefix = null;
var _path = null;
var pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
}
else {
prefix = '';
_path = pieces[0];
}
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
});
return {
module: module,
deps: deps
};
};
destFiles.forEach(function (destFile, index) {
if (!/\.js$/.test(destFile.dest)) {
return;
}
if (/\.nls\.js$/.test(destFile.dest)) {
return;
}
// Do one pass to record the usage counts for each module id
var useCounts = {};
destFile.sources.forEach(function (source) {
var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}
var defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach(function (dep) {
useCounts[dep] = (useCounts[dep] || 0) + 1;
});
});
var sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort(function (a, b) {
return useCounts[b] - useCounts[a];
});
var replacementMap = {};
sortedByUseModules.forEach(function (module, index) {
replacementMap[module] = index;
});
destFile.sources.forEach(function (source) {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
var defineCall = parseDefineCall(moduleMatch, depsMatch);
return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
});
});
destFile.sources.unshift({
path: null,
contents: [
'(function() {',
"var __m = " + JSON.stringify(sortedByUseModules) + ";",
"var __M = function(deps) {",
" var result = [];",
" for (var i = 0, len = deps.length; i < len; i++) {",
" result[i] = __m[deps[i]];",
" }",
" return result;",
"};"
].join('\n')
});
destFile.sources.push({
path: null,
contents: '}).call(this);'
});
});
return destFiles;
}
function removeDuplicateTSBoilerplate(destFiles) {
// Taken from typescript compiler => emitFiles
var BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
];
destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = [];
destFile.sources.forEach(function (source) {
var lines = source.contents.split(/\r\n|\n|\r/);
var newLines = [];
var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
if (END_BOILERPLATE.test(line)) {
IS_REMOVING_BOILERPLATE = false;
}
}
else {
for (var j = 0; j < BOILERPLATE.length; j++) {
var boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true;
END_BOILERPLATE = boilerplate.end;
}
else {
SEEN_BOILERPLATE[j] = true;
}
}
}
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
}
else {
newLines.push(line);
}
}
}
source.contents = newLines.join('\n');
});
});
return destFiles;
}
function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend, append, dest) {
if (!dest) {
dest = entryPoint + '.js';
}
var mainResult = {
sources: [],
dest: dest
}, results = [mainResult];
var usedPlugins = {};
var getLoaderPlugin = function (pluginName) {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
return usedPlugins[pluginName];
};
includedModules.forEach(function (c) {
var bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
var pluginName = c.substr(0, bangIndex);
var plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return;
}
var module = modulesMap[c];
if (module.path === 'empty:') {
return;
}
var contents = readFileAndRemoveBOM(module.path);
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
}
else {
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
}
});
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
var req = (function () {
throw new Error('no-no!');
});
req.toUrl = function (something) { return something; };
var write = function (filename, contents) {
results.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.writeFile(pluginName, entryPoint, req, write, {});
}
});
var toIFile = function (path) {
var contents = readFileAndRemoveBOM(path);
return {
path: path,
contents: contents
};
};
var toPrepend = (prepend || []).map(toIFile);
var toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return {
files: results,
usedPlugins: usedPlugins
};
}
function readFileAndRemoveBOM(path) {
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1);
}
return contents;
}
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
var result = '';
if (typeof plugin.write === 'function') {
var write = (function (what) {
result += what;
});
write.getEntryPoint = function () {
return entryPoint;
};
write.asModule = function (moduleId, code) {
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code;
};
plugin.write(pluginName, moduleName, write);
}
return {
path: null,
contents: result
};
}
function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
// `defineCallPosition` is the position in code: |define()
var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|()
var parensOffset = contents.indexOf('(', defineCallOffset);
var insertStr = '"' + moduleId + '", ';
return {
path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
};
}
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
path: path,
contents: contents + '\n;\n' + strDefine
};
}
/**
* Convert a position (line:col) to (offset) in string `str`
*/
function positionToOffset(str, desiredLine, desiredCol) {
if (desiredLine === 1) {
return desiredCol - 1;
}
var line = 1, lastNewLineOffset = -1;
do {
if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1;
}
lastNewLineOffset = str.indexOf('\n', lastNewLineOffset + 1);
line++;
} while (lastNewLineOffset >= 0);
return -1;
}
/**
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes, graph) {
var result = {}, queue = rootNodes;
rootNodes.forEach(function (node) {
result[node] = true;
});
while (queue.length > 0) {
var el = queue.shift();
var myEdges = graph[el] || [];
myEdges.forEach(function (toNode) {
if (!result[toNode]) {
result[toNode] = true;
queue.push(toNode);
}
});
}
return result;
}
/**
* Perform a topological sort on `graph`
*/
function topologicalSort(graph) {
var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach(function (fromNode) {
allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach(function (toNode) {
allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || [];
inverseEdges[toNode].push(fromNode);
});
});
// https://en.wikipedia.org/wiki/Topological_sorting
var S = [], L = [];
Object.keys(allNodes).forEach(function (node) {
if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node];
S.push(node);
}
});
while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs
S.sort();
var n = S.shift();
L.push(n);
var myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach(function (m) {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m];
S.push(m);
}
});
}
if (Object.keys(outgoingEdgeCount).length > 0) {
throw new Error('Cannot do topological sort on cyclic graph, remaining nodes: ' + Object.keys(outgoingEdgeCount));
}
return L;
}
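
For orientation, a minimal sketch of driving bundle() directly; the entry point name and loader config below are illustrative and not taken from this build:

bundle(
    [{ name: 'vs/example/main', exclude: ['vs/css', 'vs/nls'] }],
    { paths: {} },
    function (err, result) {
        if (err) { throw err; }
        result.files.forEach(function (f) {
            console.log(f.dest, '<-', f.sources.length, 'sources');
        });
    }
);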

643
build/lib/bundle.ts Normal file
View File

@@ -0,0 +1,643 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import fs = require('fs');
import path = require('path');
import vm = require('vm');
interface IPosition {
line: number;
col: number;
}
interface IBuildModuleInfo {
id: string;
path: string;
defineLocation: IPosition;
dependencies: string[];
shim: string;
exports: any;
}
interface IBuildModuleInfoMap {
[moduleId: string]: IBuildModuleInfo;
}
interface ILoaderPlugin {
write(pluginName: string, moduleName: string, write: ILoaderPluginWriteFunc): void;
writeFile(pluginName: string, entryPoint: string, req: ILoaderPluginReqFunc, write: (filename: string, contents: string) => void, config: any): void;
finishBuild(write: (filename: string, contents: string) => void): void;
}
interface ILoaderPluginWriteFunc {
(something: string): void;
getEntryPoint(): string;
asModule(moduleId: string, code: string): void;
}
interface ILoaderPluginReqFunc {
(something: string): void;
toUrl(something: string): string;
}
export interface IEntryPoint {
name: string;
include: string[];
exclude: string[];
prepend: string[];
append: string[];
dest: string;
}
interface IEntryPointMap {
[moduleId: string]: IEntryPoint;
}
export interface IGraph {
[node: string]: string[];
}
interface INodeSet {
[node: string]: boolean;
}
export interface IFile {
path: string;
contents: string;
}
export interface IConcatFile {
dest: string;
sources: IFile[];
}
export interface IBundleData {
graph: IGraph;
bundles: { [moduleId: string]: string[]; };
}
export interface IBundleResult {
files: IConcatFile[];
cssInlinedResources: string[];
bundleData: IBundleData;
}
interface IPartialBundleResult {
files: IConcatFile[];
bundleData: IBundleData;
}
export interface ILoaderConfig {
isBuild?: boolean;
paths?: { [path: string]: any; };
}
/**
* Bundle `entryPoints` given config `config`.
*/
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
let entryPointsMap: IEntryPointMap = {};
entryPoints.forEach((module: IEntryPoint) => {
entryPointsMap[module.name] = module;
});
let allMentionedModulesMap: { [modules: string]: boolean; } = {};
entryPoints.forEach((module: IEntryPoint) => {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
});
(module.exclude || []).forEach(function (excludedModule) {
allMentionedModulesMap[excludedModule] = true;
});
});
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
var loader: any = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);
loader(['require'], (localRequire) => {
let resolvePath = (path: string) => {
let r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (let moduleId in entryPointsMap) {
let entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
if (entryPoint.prepend) {
entryPoint.prepend = entryPoint.prepend.map(resolvePath);
}
}
});
loader(Object.keys(allMentionedModulesMap), () => {
let modules = <IBuildModuleInfo[]>loader.getBuildInfo();
let partialResult = emitEntryPoints(modules, entryPointsMap);
let cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, (err) => callback(err, null));
}
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
let modulesMap: IBuildModuleInfoMap = {};
modules.forEach((m: IBuildModuleInfo) => {
modulesMap[m.id] = m;
});
let modulesGraph: IGraph = {};
modules.forEach((m: IBuildModuleInfo) => {
modulesGraph[m.id] = m.dependencies;
});
let sortedModules = topologicalSort(modulesGraph);
let result: IConcatFile[] = [];
let usedPlugins: IPluginMap = {};
let bundleData: IBundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach((moduleToBundle: string) => {
let info = entryPoints[moduleToBundle];
let rootNodes = [moduleToBundle].concat(info.include || []);
let allDependencies = visit(rootNodes, modulesGraph);
let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot: string) => {
let allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude: string) => {
delete allDependencies[exclude];
});
});
let includedModules = sortedModules.filter((module: string) => {
return allDependencies[module];
});
bundleData.bundles[moduleToBundle] = includedModules;
let res = emitEntryPoint(
modulesMap,
modulesGraph,
moduleToBundle,
includedModules,
info.prepend,
info.append,
info.dest
);
result = result.concat(res.files);
for (let pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});
Object.keys(usedPlugins).forEach((pluginName: string) => {
let plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
let write = (filename: string, contents: string) => {
result.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.finishBuild(write);
}
});
return {
// TODO@TS 2.1.2
files: extractStrings(removeDuplicateTSBoilerplate(result)),
bundleData: bundleData
};
}
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
let module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(',');
deps = deps.map((dep) => {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
let prefix: string = null;
let _path: string = null;
let pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
} else {
prefix = '';
_path = pieces[0];
}
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
let res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
});
return {
module: module,
deps: deps
};
};
destFiles.forEach((destFile, index) => {
if (!/\.js$/.test(destFile.dest)) {
return;
}
if (/\.nls\.js$/.test(destFile.dest)) {
return;
}
// Do one pass to record the usage counts for each module id
let useCounts: { [moduleId: string]: number; } = {};
destFile.sources.forEach((source) => {
let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}
let defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => {
useCounts[dep] = (useCounts[dep] || 0) + 1;
});
});
let sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => {
return useCounts[b] - useCounts[a];
});
let replacementMap: { [moduleId: string]: number; } = {};
sortedByUseModules.forEach((module, index) => {
replacementMap[module] = index;
});
destFile.sources.forEach((source) => {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
let defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
});
});
destFile.sources.unshift({
path: null,
contents: [
'(function() {',
`var __m = ${JSON.stringify(sortedByUseModules)};`,
`var __M = function(deps) {`,
` var result = [];`,
` for (var i = 0, len = deps.length; i < len; i++) {`,
` result[i] = __m[deps[i]];`,
` }`,
` return result;`,
`};`
].join('\n')
});
destFile.sources.push({
path: null,
contents: '}).call(this);'
});
});
return destFiles;
}
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
// Taken from typescript compiler => emitFiles
let BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
{ start: /^var __metadata/, end: /^};$/ },
{ start: /^var __param/, end: /^};$/ },
{ start: /^var __awaiter/, end: /^};$/ },
];
destFiles.forEach((destFile) => {
let SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => {
let lines = source.contents.split(/\r\n|\n|\r/);
let newLines: string[] = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
for (let i = 0; i < lines.length; i++) {
let line = lines[i];
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
if (END_BOILERPLATE.test(line)) {
IS_REMOVING_BOILERPLATE = false;
}
} else {
for (let j = 0; j < BOILERPLATE.length; j++) {
let boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true;
END_BOILERPLATE = boilerplate.end;
} else {
SEEN_BOILERPLATE[j] = true;
}
}
}
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
} else {
newLines.push(line);
}
}
}
source.contents = newLines.join('\n');
});
});
return destFiles;
}
interface IPluginMap {
[moduleId: string]: ILoaderPlugin;
}
interface IEmitEntryPointResult {
files: IConcatFile[];
usedPlugins: IPluginMap;
}
function emitEntryPoint(
modulesMap: IBuildModuleInfoMap,
deps: IGraph,
entryPoint: string,
includedModules: string[],
prepend: string[],
append: string[],
dest: string
): IEmitEntryPointResult {
if (!dest) {
dest = entryPoint + '.js';
}
let mainResult: IConcatFile = {
sources: [],
dest: dest
},
results: IConcatFile[] = [mainResult];
let usedPlugins: IPluginMap = {};
let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
return usedPlugins[pluginName];
};
includedModules.forEach((c: string) => {
let bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
let pluginName = c.substr(0, bangIndex);
let plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return;
}
let module = modulesMap[c];
if (module.path === 'empty:') {
return;
}
let contents = readFileAndRemoveBOM(module.path);
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
} else {
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
}
});
Object.keys(usedPlugins).forEach((pluginName: string) => {
let plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
let req: ILoaderPluginReqFunc = <any>(() => {
throw new Error('no-no!');
});
req.toUrl = something => something;
let write = (filename: string, contents: string) => {
results.push({
dest: filename,
sources: [{
path: null,
contents: contents
}]
});
};
plugin.writeFile(pluginName, entryPoint, req, write, {});
}
});
let toIFile = (path): IFile => {
let contents = readFileAndRemoveBOM(path);
return {
path: path,
contents: contents
};
};
let toPrepend = (prepend || []).map(toIFile);
let toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return {
files: results,
usedPlugins: usedPlugins
};
}
function readFileAndRemoveBOM(path: string): string {
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1);
}
return contents;
}
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
let result = '';
if (typeof plugin.write === 'function') {
let write: ILoaderPluginWriteFunc = <any>((what) => {
result += what;
});
write.getEntryPoint = () => {
return entryPoint;
};
write.asModule = (moduleId: string, code: string) => {
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code;
};
plugin.write(pluginName, moduleName, write);
}
return {
path: null,
contents: result
};
}
function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
// `defineCallPosition` is the position in code: |define()
let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|()
let parensOffset = contents.indexOf('(', defineCallOffset);
let insertStr = '"' + moduleId + '", ';
return {
path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
};
}
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
path: path,
contents: contents + '\n;\n' + strDefine
};
}
/**
* Convert a position (line:col) to (offset) in string `str`
*/
function positionToOffset(str: string, desiredLine: number, desiredCol: number): number {
if (desiredLine === 1) {
return desiredCol - 1;
}
let line = 1,
lastNewLineOffset = -1;
do {
if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1;
}
lastNewLineOffset = str.indexOf('\n', lastNewLineOffset + 1);
line++;
} while (lastNewLineOffset >= 0);
return -1;
}
/**
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes: string[], graph: IGraph): INodeSet {
let result: INodeSet = {},
queue = rootNodes;
rootNodes.forEach((node) => {
result[node] = true;
});
while (queue.length > 0) {
let el = queue.shift();
let myEdges = graph[el] || [];
myEdges.forEach((toNode) => {
if (!result[toNode]) {
result[toNode] = true;
queue.push(toNode);
}
});
}
return result;
}
/**
* Perform a topological sort on `graph`
*/
function topologicalSort(graph: IGraph): string[] {
let allNodes: INodeSet = {},
outgoingEdgeCount: { [node: string]: number; } = {},
inverseEdges: IGraph = {};
Object.keys(graph).forEach((fromNode: string) => {
allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach((toNode) => {
allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || [];
inverseEdges[toNode].push(fromNode);
});
});
// https://en.wikipedia.org/wiki/Topological_sorting
let S: string[] = [],
L: string[] = [];
Object.keys(allNodes).forEach((node: string) => {
if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node];
S.push(node);
}
});
while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs
S.sort();
let n: string = S.shift();
L.push(n);
let myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m: string) => {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m];
S.push(m);
}
});
}
if (Object.keys(outgoingEdgeCount).length > 0) {
throw new Error('Cannot do topological sort on cyclic graph, remaining nodes: ' + Object.keys(outgoingEdgeCount));
}
return L;
}
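
topologicalSort treats each key as a node and its array as outgoing dependency edges, and emits dependencies before their dependents. A tiny worked example (node names hypothetical):

// a depends on b, b depends on c
const order = topologicalSort({ a: ['b'], b: ['c'], c: [] });
// order === ['c', 'b', 'a'], so every module appears after the modules it depends on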

170
build/lib/compilation.js Normal file
View File

@@ -0,0 +1,170 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var gulp = require("gulp");
var tsb = require("gulp-tsb");
var es = require("event-stream");
var watch = require('./watch');
var nls = require("./nls");
var util = require("./util");
var reporter_1 = require("./reporter");
var path = require("path");
var bom = require("gulp-bom");
var sourcemaps = require("gulp-sourcemaps");
var _ = require("underscore");
var monacodts = require("../monaco/api");
var fs = require("fs");
var reporter = reporter_1.createReporter();
var rootDir = path.join(__dirname, '../../src');
var options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
function createCompile(build, emitError) {
var opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
return function (token) {
var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
var input = es.through();
var output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
function compileTask(out, build) {
return function () {
var compile = createCompile(build, true);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
return src
.pipe(compile())
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, false));
};
}
exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
var compile = createCompile(build);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' });
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, true));
};
}
exports.watchTask = watchTask;
function reloadTypeScriptNodeModule() {
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[memory watch dog]'), message].concat(rest));
}
function heapUsed() {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out, isWatch) {
var neededFiles = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});
var inputFiles = {};
for (var filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}
var setInputFile = function (filePath, contents) {
if (inputFiles[filePath] === contents) {
// no change
return;
}
inputFiles[filePath] = contents;
var neededInputFilesCount = Object.keys(neededFiles).length;
var availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
};
var run = function () {
var result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};
var resultStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
}
resultStream = es.through(function (data) {
var filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
}
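
compileTask and watchTask both return plain gulp task bodies, so registering them is a one-liner. A sketch with made-up task and output names:

gulp.task('compile-example', compileTask('out-example', false));
gulp.task('watch-example', ['compile-example'], watchTask('out-example', false));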

207
build/lib/compilation.ts Normal file
View File

@@ -0,0 +1,207 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as gulp from 'gulp';
import * as tsb from 'gulp-tsb';
import * as es from 'event-stream';
const watch = require('./watch');
import * as nls from './nls';
import * as util from './util';
import { createReporter } from './reporter';
import * as path from 'path';
import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as fs from 'fs';
const reporter = createReporter();
const rootDir = path.join(__dirname, '../../src');
const options = require('../../src/tsconfig.json').compilerOptions;
options.verbose = false;
options.sourceMap = true;
options.rootDir = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
function createCompile(build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(options);
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, null, null, err => reporter(err.toString()));
return function (token?: util.ICancellationToken) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
const input = es.through();
const output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
// .pipe(build ? reloadTypeScriptNodeModule() : es.through())
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: options.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
export function compileTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(build, true);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
return src
.pipe(compile())
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, false));
};
}
export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () {
const compile = createCompile(build);
const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
const watchSrc = watch('src/**', { base: 'src' });
return watchSrc
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out))
.pipe(monacodtsTask(out, true));
};
}
function reloadTypeScriptNodeModule(): NodeJS.ReadWriteStream {
var util = require('gulp-util');
function log(message: any, ...rest: any[]): void {
util.log(util.colors.cyan('[memory watch dog]'), message, ...rest);
}
function heapUsed(): string {
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
}
return es.through(function (data) {
this.emit('data', data);
}, function () {
log('memory usage after compilation finished: ' + heapUsed());
// It appears we are running into some variant of
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
//
// Even though all references are dropped, some
// optimized methods in the TS compiler end up holding references
// to the entire TypeScript language host (>600MB)
//
// The idea is to force v8 to drop references to these
// optimized methods, by "reloading" the typescript node module
log('Reloading typescript node module...');
var resolvedName = require.resolve('typescript');
var originalModule = require.cache[resolvedName];
delete require.cache[resolvedName];
var newExports = require('typescript');
require.cache[resolvedName] = originalModule;
for (var prop in newExports) {
if (newExports.hasOwnProperty(prop)) {
originalModule.exports[prop] = newExports[prop];
}
}
log('typescript node module reloaded.');
this.emit('end');
});
}
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
const neededFiles: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});
const inputFiles: { [file: string]: string; } = {};
for (let filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}
const setInputFile = (filePath: string, contents: string) => {
if (inputFiles[filePath] === contents) {
// no change
return;
}
inputFiles[filePath] = contents;
const neededInputFilesCount = Object.keys(neededFiles).length;
const availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
};
const run = () => {
const result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
} else {
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};
let resultStream: NodeJS.ReadWriteStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
}
resultStream = es.through(function (data) {
const filePath = path.normalize(data.path);
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
}

118
build/lib/extensions.js Normal file
View File

@@ -0,0 +1,118 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var es = require("event-stream");
var assign = require("object-assign");
var remote = require("gulp-remote-src");
var flatmap = require('gulp-flatmap');
var vzip = require('gulp-vinyl-zip');
var filter = require('gulp-filter');
var rename = require('gulp-rename');
var util = require('gulp-util');
var buffer = require('gulp-buffer');
var json = require('gulp-json-editor');
var fs = require("fs");
var path = require("path");
var vsce = require("vsce");
var File = require("vinyl");
function fromLocal(extensionPath) {
var result = es.through();
vsce.listFiles({ cwd: extensionPath })
.then(function (fileNames) {
var files = fileNames
.map(function (fileName) { return path.join(extensionPath, fileName); })
.map(function (filePath) { return new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}); });
es.readArray(files).pipe(result);
})
.catch(function (err) { return result.emit('error', err); });
return result;
}
exports.fromLocal = fromLocal;
function error(err) {
var result = es.through();
setTimeout(function () { return result.emit('error', err); });
return result;
}
var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
};
function fromMarketplace(extensionName, version) {
var filterType = 7;
var value = extensionName;
var criterium = { filterType: filterType, value: value };
var criteria = [criterium];
var pageNumber = 1;
var pageSize = 1;
var sortBy = 0;
var sortOrder = 0;
var flags = 0x1 | 0x2 | 0x80;
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
var headers = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
var options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers: headers,
body: body
}
};
return remote('/extensionquery', options)
.pipe(flatmap(function (stream, f) {
var rawResult = f.contents.toString('utf8');
var result = JSON.parse(rawResult);
var extension = result.results[0].extensions[0];
if (!extension) {
return error("No such extension: " + extensionName);
}
var metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
if (!extensionVersion) {
return error("No such extension version: " + extensionName + " @ " + version);
}
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
if (!asset) {
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
}
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
var options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(function (stream) {
var packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}
exports.fromMarketplace = fromMarketplace;
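
Both helpers return vinyl streams, so they drop straight into a gulp pipeline. A hedged sketch, assuming gulp is in scope and using placeholder extension ids, versions, and destinations:

fromMarketplace('publisher.example-extension', '0.0.1')
    .pipe(gulp.dest('.build/extensions/example-extension'));
fromLocal(path.join(__dirname, '../../extensions/example'))
    .pipe(gulp.dest('.build/extensions/example'));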

132
build/lib/extensions.ts Normal file
View File

@@ -0,0 +1,132 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as es from 'event-stream';
import { Stream } from 'stream';
import assign = require('object-assign');
import remote = require('gulp-remote-src');
const flatmap = require('gulp-flatmap');
const vzip = require('gulp-vinyl-zip');
const filter = require('gulp-filter');
const rename = require('gulp-rename');
const util = require('gulp-util');
const buffer = require('gulp-buffer');
const json = require('gulp-json-editor');
import * as fs from 'fs';
import * as path from 'path';
import * as vsce from 'vsce';
import * as File from 'vinyl';
export function fromLocal(extensionPath: string): Stream {
const result = es.through();
vsce.listFiles({ cwd: extensionPath })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result;
}
function error(err: any): Stream {
const result = es.through();
setTimeout(() => result.emit('error', err));
return result;
}
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
};
export function fromMarketplace(extensionName: string, version: string): Stream {
const filterType = 7;
const value = extensionName;
const criterium = { filterType, value };
const criteria = [criterium];
const pageNumber = 1;
const pageSize = 1;
const sortBy = 0;
const sortOrder = 0;
const flags = 0x1 | 0x2 | 0x80;
const assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
const filters = [{ criteria, pageNumber, pageSize, sortBy, sortOrder }];
const body = JSON.stringify({ filters, assetTypes, flags });
const headers: any = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
const options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers,
body: body
}
};
return remote('/extensionquery', options)
.pipe(flatmap((stream, f) => {
const rawResult = f.contents.toString('utf8');
const result = JSON.parse(rawResult);
const extension = result.results[0].extensions[0];
if (!extension) {
return error(`No such extension: ${extensionName}`);
}
const metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
const extensionVersion = extension.versions.filter(v => v.version === version)[0];
if (!extensionVersion) {
return error(`No such extension version: ${extensionName} @ ${version}`);
}
const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
if (!asset) {
return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
}
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(stream => {
const packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}

53
build/lib/git.js Normal file
View File

@@ -0,0 +1,53 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var fs = require("fs");
/**
* Returns the sha1 commit version of a repository or undefined in case of failure.
*/
function getVersion(repo) {
var git = path.join(repo, '.git');
var headPath = path.join(git, 'HEAD');
var head;
try {
head = fs.readFileSync(headPath, 'utf8').trim();
}
catch (e) {
return void 0;
}
if (/^[0-9a-f]{40}$/i.test(head)) {
return head;
}
var refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) {
return void 0;
}
var ref = refMatch[1];
var refPath = path.join(git, ref);
try {
return fs.readFileSync(refPath, 'utf8').trim();
}
catch (e) {
// noop
}
var packedRefsPath = path.join(git, 'packed-refs');
var refsRaw;
try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
}
catch (e) {
return void 0;
}
var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
var refsMatch;
var refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}
return refs[ref];
}
exports.getVersion = getVersion;

61
build/lib/git.ts Normal file
View File

@@ -0,0 +1,61 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as fs from 'fs';
/**
* Returns the sha1 commit version of a repository or undefined in case of failure.
*/
export function getVersion(repo: string): string {
const git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD');
let head: string;
try {
head = fs.readFileSync(headPath, 'utf8').trim();
} catch (e) {
return void 0;
}
if (/^[0-9a-f]{40}$/i.test(head)) {
return head;
}
const refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) {
return void 0;
}
const ref = refMatch[1];
const refPath = path.join(git, ref);
try {
return fs.readFileSync(refPath, 'utf8').trim();
} catch (e) {
// noop
}
const packedRefsPath = path.join(git, 'packed-refs');
let refsRaw: string;
try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
} catch (e) {
return void 0;
}
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch: RegExpExecArray;
let refs: { [ref: string]: string } = {};
while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1];
}
return refs[ref];
}
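
A short usage sketch: resolving the current commit of the repository two levels above build/lib (the fallback string is illustrative):

const sha1 = getVersion(path.join(__dirname, '..', '..'));
console.log('HEAD commit:', sha1 || '<unknown>');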

1001
build/lib/i18n.js Normal file

File diff suppressed because it is too large

View File

@@ -0,0 +1,198 @@
{
"editor": [
{
"name": "vs/platform",
"project": "vscode-editor"
},
{
"name": "vs/editor/contrib",
"project": "vscode-editor"
},
{
"name": "vs/editor",
"project": "vscode-editor"
},
{
"name": "vs/base",
"project": "vscode-editor"
}
],
"workbench": [
{
"name": "vs/code",
"project": "vscode-workbench"
},
{
"name": "vs/workbench",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/cli",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/codeEditor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/debug",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/emmet",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/execution",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/explorers",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/extensions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/feedback",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/html",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/markers",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/nps",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/output",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/performance",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/preferences",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/quickopen",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/relauncher",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/scm",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/search",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/snippets",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/surveys",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/tasks",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/terminal",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/themes",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/trust",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/update",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/views",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/watermark",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/parts/welcome",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/configuration",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/crashReporter",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/editor",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/extensions",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/keybinding",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/message",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/mode",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/progress",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/textfile",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/themes",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/textMate",
"project": "vscode-workbench"
},
{
"name": "setup_messages",
"project": "vscode-workbench"
}
]
}
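
The mapping above pairs module-path prefixes with Transifex projects. Below is a sketch of a lookup over such a list, consistent with the longest-prefix behaviour the i18n tests later in this diff expect from getResource; the function and type names here are assumptions for illustration.

// sketch: pick the most specific prefix that matches a module path (names are assumptions)
interface Resource { name: string; project: string; }

function findResource(resources: Resource[], modulePath: string): Resource | undefined {
	let best: Resource | undefined;
	for (const r of resources) {
		if (modulePath === r.name || modulePath.indexOf(r.name + '/') === 0) {
			if (!best || r.name.length > best.name.length) {
				best = r; // e.g. vs/workbench/parts/files wins over vs/workbench
			}
		}
	}
	return best;
}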

1116
build/lib/i18n.ts Normal file

File diff suppressed because it is too large

359
build/lib/nls.js Normal file
View File

@@ -0,0 +1,359 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var ts = require("typescript");
var lazy = require("lazy.js");
var event_stream_1 = require("event-stream");
var File = require("vinyl");
var sm = require("source-map");
var assign = require("object-assign");
var path = require("path");
var CollectStepResult;
(function (CollectStepResult) {
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
CollectStepResult[CollectStepResult["YesAndRecurse"] = 1] = "YesAndRecurse";
CollectStepResult[CollectStepResult["No"] = 2] = "No";
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
})(CollectStepResult || (CollectStepResult = {}));
function collect(node, fn) {
var result = [];
function loop(node) {
var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node);
}
if (stepResult === CollectStepResult.YesAndRecurse || stepResult === CollectStepResult.NoAndRecurse) {
ts.forEachChild(node, loop);
}
}
loop(node);
return result;
}
function clone(object) {
var result = {};
for (var id in object) {
result[id] = object[id];
}
return result;
}
function template(lines) {
var indent = '', wrap = '';
if (lines.length > 1) {
indent = '\t';
wrap = '\n';
}
return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
}
/**
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls() {
var input = event_stream_1.through();
var output = input.pipe(event_stream_1.through(function (f) {
var _this = this;
if (!f.sourceMap) {
return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
}
var source = f.sourceMap.sources[0];
if (!source) {
return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
}
var root = f.sourceMap.sourceRoot;
if (root) {
source = path.join(root, source);
}
var typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
}
nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
}));
return event_stream_1.duplex(input, output);
}
function isImportNode(node) {
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
}
(function (nls_1) {
function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path
});
}
nls_1.fileFrom = fileFrom;
function mappedPositionFrom(source, lc) {
return { source: source, line: lc.line + 1, column: lc.character };
}
nls_1.mappedPositionFrom = mappedPositionFrom;
function lcFrom(position) {
return { line: position.line - 1, character: position.column };
}
nls_1.lcFrom = lcFrom;
var SingleFileServiceHost = (function () {
function SingleFileServiceHost(options, filename, contents) {
var _this = this;
this.options = options;
this.filename = filename;
this.getCompilationSettings = function () { return _this.options; };
this.getScriptFileNames = function () { return [_this.filename]; };
this.getScriptVersion = function () { return '1'; };
this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
this.getCurrentDirectory = function () { return ''; };
this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
this.file = ts.ScriptSnapshot.fromString(contents);
this.lib = ts.ScriptSnapshot.fromString('');
}
return SingleFileServiceHost;
}());
nls_1.SingleFileServiceHost = SingleFileServiceHost;
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
return CollectStepResult.No;
}
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
}
function analyze(contents, options) {
if (options === void 0) { options = {}; }
var filename = 'file.ts';
var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
var service = ts.createLanguageService(serviceHost);
var sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
// all imports
var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
// import nls = require('vs/nls');
var importEqualsDeclarations = imports
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportEqualsDeclaration; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference; })
.filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
// import ... from 'vs/nls';
var importDeclarations = imports
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportDeclaration; })
.map(function (n) { return n; })
.filter(function (d) { return d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral; })
.filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
.filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
var nlsExpressions = importEqualsDeclarations
.map(function (d) { return d.moduleReference.expression; })
.concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
.map(function (d) { return ({
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
}); });
// `nls.localize(...)` calls
var nlsLocalizeCallExpressions = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(function (d) { return d.name; }))
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; })
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; })
.filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports
var allLocalizeImportDeclarations = importDeclarations
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports; })
.map(function (d) { return d.importClause.namedBindings.elements; })
.flatten();
// `localize` read-only references
var localizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.name.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; });
// custom named `localize` read-only references
var namedLocalizeReferences = allLocalizeImportDeclarations
.filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
.map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
.flatten()
.filter(function (r) { return !r.isWriteAccess; });
// find the deepest call expressions AST nodes that contain those references
var localizeCallExpressions = localizeReferences
.concat(namedLocalizeReferences)
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(function (a) { return lazy(a).last(); })
.filter(function (n) { return !!n; })
.map(function (n) { return n; });
// collect everything
var localizeCalls = nlsLocalizeCallExpressions
.concat(localizeCallExpressions)
.map(function (e) { return e.arguments; })
.filter(function (a) { return a.length > 1; })
.sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
.map(function (a) { return ({
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
key: a[0].getText(),
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
value: a[1].getText()
}); });
return {
localizeCalls: localizeCalls.toArray(),
nlsExpressions: nlsExpressions.toArray()
};
}
nls_1.analyze = analyze;
var TextModel = (function () {
function TextModel(contents) {
var regex = /\r\n|\r|\n/g;
var index = 0;
var match;
this.lines = [];
this.lineEndings = [];
while (match = regex.exec(contents)) {
this.lines.push(contents.substring(index, match.index));
this.lineEndings.push(match[0]);
index = regex.lastIndex;
}
if (contents.length > 0) {
this.lines.push(contents.substring(index, contents.length));
this.lineEndings.push('');
}
}
TextModel.prototype.get = function (index) {
return this.lines[index];
};
TextModel.prototype.set = function (index, line) {
this.lines[index] = line;
};
Object.defineProperty(TextModel.prototype, "lineCount", {
get: function () {
return this.lines.length;
},
enumerable: true,
configurable: true
});
/**
* Applies patch(es) to the model.
* Multiple patches must be ordered.
* Does not support patches spanning multiple lines.
*/
TextModel.prototype.apply = function (patch) {
var startLineNumber = patch.span.start.line;
var endLineNumber = patch.span.end.line;
var startLine = this.lines[startLineNumber] || '';
var endLine = this.lines[endLineNumber] || '';
this.lines[startLineNumber] = [
startLine.substring(0, patch.span.start.character),
patch.content,
endLine.substring(patch.span.end.character)
].join('');
for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
this.lines[i] = '';
}
};
TextModel.prototype.toString = function () {
return lazy(this.lines).zip(this.lineEndings)
.flatten().toArray().join('');
};
return TextModel;
}());
nls_1.TextModel = TextModel;
function patchJavascript(patches, contents, moduleId) {
var model = new nls.TextModel(contents);
// patch the localize calls
lazy(patches).reverse().each(function (p) { return model.apply(p); });
// patch the 'vs/nls' imports
var firstLine = model.get(0);
var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
model.set(0, patchedFirstLine);
return model.toString();
}
nls_1.patchJavascript = patchJavascript;
function patchSourcemap(patches, rsm, smc) {
var smg = new sm.SourceMapGenerator({
file: rsm.file,
sourceRoot: rsm.sourceRoot
});
patches = patches.reverse();
var currentLine = -1;
var currentLineDiff = 0;
var source = null;
smc.eachMapping(function (m) {
var patch = patches[patches.length - 1];
var original = { line: m.originalLine, column: m.originalColumn };
var generated = { line: m.generatedLine, column: m.generatedColumn };
if (currentLine !== generated.line) {
currentLineDiff = 0;
}
currentLine = generated.line;
generated.column += currentLineDiff;
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
var originalLength = patch.span.end.character - patch.span.start.character;
var modifiedLength = patch.content.length;
var lengthDiff = modifiedLength - originalLength;
currentLineDiff += lengthDiff;
generated.column += lengthDiff;
patches.pop();
}
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/');
smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) {
smg.setSourceContent(source, smc.sourceContentFor(source));
}
return JSON.parse(smg.toString());
}
nls_1.patchSourcemap = patchSourcemap;
function patch(moduleId, typescript, javascript, sourcemap) {
var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
if (localizeCalls.length === 0) {
return { javascript: javascript, sourcemap: sourcemap };
}
var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
var smc = new sm.SourceMapConsumer(sourcemap);
var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
var i = 0;
// build patches
var patches = lazy(localizeCalls)
.map(function (lc) { return ([
{ range: lc.keySpan, content: '' + (i++) },
{ range: lc.valueSpan, content: 'null' }
]); })
.flatten()
.map(function (c) {
var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start: start, end: end }, content: c.content };
})
.toArray();
javascript = patchJavascript(patches, javascript, moduleId);
// since imports are not within the sourcemap information,
// we must do this MacGyver style
if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, function (line) {
return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
});
}
sourcemap = patchSourcemap(patches, sourcemap, smc);
return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
}
nls_1.patch = patch;
function patchFiles(javascriptFile, typescript) {
// hack?
var moduleId = javascriptFile.relative
.replace(/\.js$/, '')
.replace(/\\/g, '/');
var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
var result = [fileFrom(javascriptFile, javascript)];
result[0].sourceMap = sourcemap;
if (nlsKeys) {
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
}
if (nls) {
result.push(fileFrom(javascriptFile, nls, javascriptFile.path.replace(/\.js$/, '.nls.js')));
}
return result;
}
nls_1.patchFiles = patchFiles;
})(nls || (nls = {}));
module.exports = nls;
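
A rough usage sketch (not in this diff): the exported nls() stream expects files whose source maps still carry the original TypeScript, so it is meant to run after source maps have been loaded. The glob, destination and task name below are assumptions.

// sketch: patch localize() calls in compiled output (glob, dest and task name are assumptions)
import * as gulp from 'gulp';
import * as sourcemaps from 'gulp-sourcemaps';
import nls = require('./nls');

gulp.task('nls-example', () => {
	return gulp.src('out-build/vs/**/*.js')
		.pipe(sourcemaps.init({ loadMaps: true })) // nls() reads sourcesContent[0] for the original TS
		.pipe(nls())
		.pipe(gulp.dest('out-build')); // also emits the *.nls.js and *.nls.keys.js side files
});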

469
build/lib/nls.ts Normal file
View File

@@ -0,0 +1,469 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as lazy from 'lazy.js';
import { duplex, through } from 'event-stream';
import File = require('vinyl');
import * as sm from 'source-map';
import assign = require('object-assign');
import path = require('path');
declare class FileSourceMap extends File {
public sourceMap: sm.RawSourceMap;
}
enum CollectStepResult {
Yes,
YesAndRecurse,
No,
NoAndRecurse
}
function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.Node[] {
const result: ts.Node[] = [];
function loop(node: ts.Node) {
var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node);
}
if (stepResult === CollectStepResult.YesAndRecurse || stepResult === CollectStepResult.NoAndRecurse) {
ts.forEachChild(node, loop);
}
}
loop(node);
return result;
}
function clone<T>(object: T): T {
var result = <T>{};
for (var id in object) {
result[id] = object[id];
}
return result;
}
function template(lines: string[]): string {
let indent = '', wrap = '';
if (lines.length > 1) {
indent = '\t';
wrap = '\n';
}
return `/*---------------------------------------------------------
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/
define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
}
/**
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls(): NodeJS.ReadWriteStream {
var input = through();
var output = input.pipe(through(function (f: FileSourceMap) {
if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
}
let source = f.sourceMap.sources[0];
if (!source) {
return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`));
}
const root = f.sourceMap.sourceRoot;
if (root) {
source = path.join(root, source);
}
const typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
}
nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
}));
return duplex(input, output);
}
function isImportNode(node: ts.Node): boolean {
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
}
module nls {
export interface INlsStringResult {
javascript: string;
sourcemap: sm.RawSourceMap;
nls?: string;
nlsKeys?: string;
}
export interface ISpan {
start: ts.LineAndCharacter;
end: ts.LineAndCharacter;
}
export interface ILocalizeCall {
keySpan: ISpan;
key: string;
valueSpan: ISpan;
value: string;
}
export interface ILocalizeAnalysisResult {
localizeCalls: ILocalizeCall[];
nlsExpressions: ISpan[];
}
export interface IPatch {
span: ISpan;
content: string;
}
export function fileFrom(file: File, contents: string, path: string = file.path) {
return new File({
contents: new Buffer(contents),
base: file.base,
cwd: file.cwd,
path: path
});
}
export function mappedPositionFrom(source: string, lc: ts.LineAndCharacter): sm.MappedPosition {
return { source, line: lc.line + 1, column: lc.character };
}
export function lcFrom(position: sm.Position): ts.LineAndCharacter {
return { line: position.line - 1, character: position.column };
}
export class SingleFileServiceHost implements ts.LanguageServiceHost {
private file: ts.IScriptSnapshot;
private lib: ts.IScriptSnapshot;
constructor(private options: ts.CompilerOptions, private filename: string, contents: string) {
this.file = ts.ScriptSnapshot.fromString(contents);
this.lib = ts.ScriptSnapshot.fromString('');
}
getCompilationSettings = () => this.options;
getScriptFileNames = () => [this.filename];
getScriptVersion = () => '1';
getScriptSnapshot = (name: string) => name === this.filename ? this.file : this.lib;
getCurrentDirectory = () => '';
getDefaultLibFileName = () => 'lib.d.ts';
}
function isCallExpressionWithinTextSpanCollectStep(textSpan: ts.TextSpan, node: ts.Node): CollectStepResult {
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
return CollectStepResult.No;
}
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
}
export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
const filename = 'file.ts';
const serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const service = ts.createLanguageService(serviceHost);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
// all imports
const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
// import nls = require('vs/nls');
const importEqualsDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration)
.map(n => <ts.ImportEqualsDeclaration>n)
.filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference)
.filter(d => (<ts.ExternalModuleReference>d.moduleReference).expression.getText() === '\'vs/nls\'');
// import ... from 'vs/nls';
const importDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportDeclaration)
.map(n => <ts.ImportDeclaration>n)
.filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral)
.filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'')
.filter(d => !!d.importClause && !!d.importClause.namedBindings);
const nlsExpressions = importEqualsDeclarations
.map(d => (<ts.ExternalModuleReference>d.moduleReference).expression)
.concat(importDeclarations.map(d => d.moduleSpecifier))
.map<ISpan>(d => ({
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
}));
// `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)
.map(d => (<ts.NamespaceImport>d.importClause.namedBindings).name)
.concat(importEqualsDeclarations.map(d => d.name))
// find read-only references to `nls`
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
.flatten()
.filter(r => !r.isWriteAccess)
// find the deepest call expressions AST nodes that contain those references
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
.map(a => lazy(a).last())
.filter(n => !!n)
.map(n => <ts.CallExpression>n)
// only `localize` calls
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && (<ts.PropertyAccessExpression>n.expression).name.getText() === 'localize');
// `localize` named imports
const allLocalizeImportDeclarations = importDeclarations
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)
.map(d => (<ts.NamedImports>d.importClause.namedBindings).elements)
.flatten();
// `localize` read-only references
const localizeReferences = allLocalizeImportDeclarations
.filter(d => d.name.getText() === 'localize')
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
.flatten()
.filter(r => !r.isWriteAccess);
// custom named `localize` read-only references
const namedLocalizeReferences = allLocalizeImportDeclarations
.filter(d => d.propertyName && d.propertyName.getText() === 'localize')
.map(n => service.getReferencesAtPosition(filename, n.name.pos + 1))
.flatten()
.filter(r => !r.isWriteAccess);
// find the deepest call expressions AST nodes that contain those references
const localizeCallExpressions = localizeReferences
.concat(namedLocalizeReferences)
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
.map(a => lazy(a).last())
.filter(n => !!n)
.map(n => <ts.CallExpression>n);
// collect everything
const localizeCalls = nlsLocalizeCallExpressions
.concat(localizeCallExpressions)
.map(e => e.arguments)
.filter(a => a.length > 1)
.sort((a, b) => a[0].getStart() - b[0].getStart())
.map<ILocalizeCall>(a => ({
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
key: a[0].getText(),
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
value: a[1].getText()
}));
return {
localizeCalls: localizeCalls.toArray(),
nlsExpressions: nlsExpressions.toArray()
};
}
export class TextModel {
private lines: string[];
private lineEndings: string[];
constructor(contents: string) {
const regex = /\r\n|\r|\n/g;
let index = 0;
let match: RegExpExecArray;
this.lines = [];
this.lineEndings = [];
while (match = regex.exec(contents)) {
this.lines.push(contents.substring(index, match.index));
this.lineEndings.push(match[0]);
index = regex.lastIndex;
}
if (contents.length > 0) {
this.lines.push(contents.substring(index, contents.length));
this.lineEndings.push('');
}
}
public get(index: number): string {
return this.lines[index];
}
public set(index: number, line: string): void {
this.lines[index] = line;
}
public get lineCount(): number {
return this.lines.length;
}
/**
* Applies patch(es) to the model.
* Multiple patches must be ordered.
* Does not support patches spanning multiple lines.
*/
public apply(patch: IPatch): void {
const startLineNumber = patch.span.start.line;
const endLineNumber = patch.span.end.line;
const startLine = this.lines[startLineNumber] || '';
const endLine = this.lines[endLineNumber] || '';
this.lines[startLineNumber] = [
startLine.substring(0, patch.span.start.character),
patch.content,
endLine.substring(patch.span.end.character)
].join('');
for (let i = startLineNumber + 1; i <= endLineNumber; i++) {
this.lines[i] = '';
}
}
public toString(): string {
return lazy(this.lines).zip(this.lineEndings)
.flatten().toArray().join('');
}
}
export function patchJavascript(patches: IPatch[], contents: string, moduleId: string): string {
const model = new nls.TextModel(contents);
// patch the localize calls
lazy(patches).reverse().each(p => model.apply(p));
// patch the 'vs/nls' imports
const firstLine = model.get(0);
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
model.set(0, patchedFirstLine);
return model.toString();
}
export function patchSourcemap(patches: IPatch[], rsm: sm.RawSourceMap, smc: sm.SourceMapConsumer): sm.RawSourceMap {
const smg = new sm.SourceMapGenerator({
file: rsm.file,
sourceRoot: rsm.sourceRoot
});
patches = patches.reverse();
let currentLine = -1;
let currentLineDiff = 0;
let source = null;
smc.eachMapping(m => {
const patch = patches[patches.length - 1];
const original = { line: m.originalLine, column: m.originalColumn };
const generated = { line: m.generatedLine, column: m.generatedColumn };
if (currentLine !== generated.line) {
currentLineDiff = 0;
}
currentLine = generated.line;
generated.column += currentLineDiff;
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
const originalLength = patch.span.end.character - patch.span.start.character;
const modifiedLength = patch.content.length;
const lengthDiff = modifiedLength - originalLength;
currentLineDiff += lengthDiff;
generated.column += lengthDiff;
patches.pop();
}
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/');
smg.addMapping({ source, name: m.name, original, generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) {
smg.setSourceContent(source, smc.sourceContentFor(source));
}
return JSON.parse(smg.toString());
}
export function patch(moduleId: string, typescript: string, javascript: string, sourcemap: sm.RawSourceMap): INlsStringResult {
const { localizeCalls, nlsExpressions } = analyze(typescript);
if (localizeCalls.length === 0) {
return { javascript, sourcemap };
}
const nlsKeys = template(localizeCalls.map(lc => lc.key));
const nls = template(localizeCalls.map(lc => lc.value));
const smc = new sm.SourceMapConsumer(sourcemap);
const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
let i = 0;
// build patches
const patches = lazy(localizeCalls)
.map(lc => ([
{ range: lc.keySpan, content: '' + (i++) },
{ range: lc.valueSpan, content: 'null' }
]))
.flatten()
.map<IPatch>(c => {
const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start, end }, content: c.content };
})
.toArray();
javascript = patchJavascript(patches, javascript, moduleId);
// since imports are not within the sourcemap information,
// we must do this MacGyver style
if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, line => {
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
});
}
sourcemap = patchSourcemap(patches, sourcemap, smc);
return { javascript, sourcemap, nlsKeys, nls };
}
export function patchFiles(javascriptFile: File, typescript: string): File[] {
// hack?
const moduleId = javascriptFile.relative
.replace(/\.js$/, '')
.replace(/\\/g, '/');
const { javascript, sourcemap, nlsKeys, nls } = patch(
moduleId,
typescript,
javascriptFile.contents.toString(),
(<any>javascriptFile).sourceMap
);
const result: File[] = [fileFrom(javascriptFile, javascript)];
(<any>result[0]).sourceMap = sourcemap;
if (nlsKeys) {
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
}
if (nls) {
result.push(fileFrom(javascriptFile, nls, javascriptFile.path.replace(/\.js$/, '.nls.js')));
}
return result;
}
}
export = nls;
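
To make the patching model concrete, a small self-contained sketch of TextModel and one IPatch: the patch replaces the span of the second localize argument with null, the same substitution patch() performs on value spans. The sample line is invented for illustration.

// sketch: apply one patch to a single line (sample content is invented)
import nls = require('./nls');

const model = new nls.TextModel("nls.localize('my.key', 'Hello World');\n");
const valuePatch: nls.IPatch = {
	span: { start: { line: 0, character: 23 }, end: { line: 0, character: 36 } },
	content: 'null'
};
model.apply(valuePatch);
console.log(model.toString()); // -> nls.localize('my.key', null);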

241
build/lib/optimize.js Normal file
View File

@@ -0,0 +1,241 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var gulp = require("gulp");
var sourcemaps = require("gulp-sourcemaps");
var filter = require("gulp-filter");
var minifyCSS = require("gulp-cssnano");
var uglify = require("gulp-uglify");
var composer = require("gulp-uglify/composer");
var uglifyes = require("uglify-es");
var es = require("event-stream");
var concat = require("gulp-concat");
var VinylFile = require("vinyl");
var bundle = require("./bundle");
var util = require("./util");
var i18n = require("./i18n");
var gulpUtil = require("gulp-util");
var flatmap = require("gulp-flatmap");
var pump = require("pump");
var REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix, message) {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
}
// {{SQL CARBON EDIT}}
function loaderConfig(emptyPaths) {
var result = {
paths: {
'vs': 'out-build/vs',
'sql': 'out-build/sql',
'vscode': 'empty:'
},
nodeModules: emptyPaths || []
};
result['vs/css'] = { inlineResources: true };
return result;
}
exports.loaderConfig = loaderConfig;
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(bundledFileHeader, bundleLoader) {
var sources = [
'out-build/vs/loader.js'
];
if (bundleLoader) {
sources = sources.concat([
'out-build/vs/css.js',
'out-build/vs/nls.js'
]);
}
var isFirst = true;
return (gulp
.src(sources, { base: 'out-build' })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
}
else {
this.emit('data', data);
}
}))
.pipe(util.loadSourcemaps())
.pipe(concat('vs/loader.js'))
.pipe(es.mapSync(function (f) {
f.sourceMap.sourceRoot = util.toFileUri(path.join(REPO_ROOT_PATH, 'src'));
return f;
})));
}
function toConcatStream(bundledFileHeader, sources, dest) {
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then
// insert a fake source at the beginning of each bundle with our copyright
var containsOurCopyright = false;
for (var i = 0, len = sources.length; i < len; i++) {
var fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true;
break;
}
}
if (containsOurCopyright) {
sources.unshift({
path: null,
contents: bundledFileHeader
});
}
var treatedSources = sources.map(function (source) {
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
var base = source.path ? root + '/out-build' : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: new Buffer(source.contents)
});
});
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest));
}
function toBundleStream(bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
function optimizeTask(opts) {
var entryPoints = opts.entryPoints;
var otherSources = opts.otherSources;
var resources = opts.resources;
var loaderConfig = opts.loaderConfig;
var bundledFileHeader = opts.header;
var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
var out = opts.out;
return function () {
var bundlesStream = es.through(); // this stream will contain the bundled files
var resourcesStream = es.through(); // this stream will contain the resources
var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource);
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
var bundleInfoArray = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
});
var otherSourcesStream = es.through();
var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
}
else {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
}
}));
var result = es.merge(loader(bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result
.pipe(sourcemaps.write('./', {
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}))
.pipe(gulp.dest(out));
};
}
exports.optimizeTask = optimizeTask;
;
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
*/
function uglifyWithCopyrights() {
var preserveComments = function (f) {
return function (node, comment) {
var text = comment.value;
var type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) {
return false;
}
var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) {
if (f.__hasOurCopyright) {
return false;
}
f.__hasOurCopyright = true;
return true;
}
if ('comment2' === type) {
// check for /*!. Note that text doesn't contain leading /*
return (text.length > 0 && text[0] === '!') || /@preserve|license|@cc_on|copyright/i.test(text);
}
else if ('comment1' === type) {
return /license|copyright/i.test(text);
}
return false;
};
};
var minify = composer(uglifyes);
var input = es.through();
var output = input
.pipe(flatmap(function (stream, f) {
return stream.pipe(minify({
output: {
comments: preserveComments(f),
// linux tfs build agent is crashing, does this help?
max_line_len: 3200000
}
}));
}));
return es.duplex(input, output);
}
function minifyTask(src, sourceMapBaseUrl) {
var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
return function (cb) {
var jsFilter = filter('**/*.js', { restore: true });
var cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
sourceMappingURL: sourceMappingURL,
sourceRoot: null,
includeContent: true,
addComment: true
}), gulp.dest(src + '-min'), function (err) {
if (err instanceof uglify.GulpUglifyError) {
console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
}
cb(err);
});
};
}
exports.minifyTask = minifyTask;
;
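
A brief wiring sketch (not in this diff): minifyTask returns a callback-style task, so it can be registered directly with gulp. The task name, source folder and base URL are placeholders, and the import path assumes a gulpfile at the build root; output lands in '<src>-min' as implemented above.

// sketch: minify a build output folder into '<src>-min' (names and URL are placeholders)
import * as gulp from 'gulp';
import { minifyTask } from './lib/optimize';

gulp.task('minify-example', minifyTask('out-example', 'https://example.com/sourcemaps'));
// each emitted file gets a sourceMappingURL of the form <baseUrl>/<relative path>.map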

330
build/lib/optimize.ts Normal file
View File

@@ -0,0 +1,330 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as gulp from 'gulp';
import * as sourcemaps from 'gulp-sourcemaps';
import * as filter from 'gulp-filter';
import * as minifyCSS from 'gulp-cssnano';
import * as uglify from 'gulp-uglify';
import * as composer from 'gulp-uglify/composer';
import * as uglifyes from 'uglify-es';
import * as es from 'event-stream';
import * as concat from 'gulp-concat';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import * as util from './util';
import * as i18n from './i18n';
import * as gulpUtil from 'gulp-util';
import * as flatmap from 'gulp-flatmap';
import * as pump from 'pump';
import * as sm from 'source-map';
const REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix: string, message: string): void {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
}
// {{SQL CARBON EDIT}}
export function loaderConfig(emptyPaths: string[]) {
const result = {
paths: {
'vs': 'out-build/vs',
'sql': 'out-build/sql',
'vscode': 'empty:'
},
nodeModules: emptyPaths || []
};
result['vs/css'] = { inlineResources: true };
return result;
}
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
declare class FileSourceMap extends VinylFile {
public sourceMap: sm.RawSourceMap;
}
function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
let sources = [
'out-build/vs/loader.js'
];
if (bundleLoader) {
sources = sources.concat([
'out-build/vs/css.js',
'out-build/vs/nls.js'
]);
}
let isFirst = true;
return (
gulp
.src(sources, { base: 'out-build' })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: '',
contents: new Buffer(bundledFileHeader)
}));
this.emit('data', data);
} else {
this.emit('data', data);
}
}))
.pipe(util.loadSourcemaps())
.pipe(concat('vs/loader.js'))
.pipe(es.mapSync<FileSourceMap, FileSourceMap>(function (f) {
f.sourceMap.sourceRoot = util.toFileUri(path.join(REPO_ROOT_PATH, 'src'));
return f;
}))
);
}
function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then
// insert a fake source at the beginning of each bundle with our copyright
let containsOurCopyright = false;
for (let i = 0, len = sources.length; i < len; i++) {
const fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true;
break;
}
}
if (containsOurCopyright) {
sources.unshift({
path: null,
contents: bundledFileHeader
});
}
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + '/out-build' : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
contents: new Buffer(source.contents)
});
});
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest));
}
function toBundleStream(bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(bundledFileHeader, bundle.sources, bundle.dest);
}));
}
export interface IOptimizeTaskOpts {
/**
* (for AMD files, will get bundled and get Copyright treatment)
*/
entryPoints: bundle.IEntryPoint[];
/**
* (for non-AMD files that should get Copyright treatment)
*/
otherSources: string[];
/**
* (svg, etc.)
*/
resources: string[];
loaderConfig: any;
/**
* (true by default - append css and nls to loader)
*/
bundleLoader?: boolean;
/**
* (basically the Copyright treatment)
*/
header: string;
/**
* (emit bundleInfo.json file)
*/
bundleInfo: boolean;
/**
* (out folder name)
*/
out: string;
/**
* (languages to process)
*/
languages: string[];
}
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
const entryPoints = opts.entryPoints;
const otherSources = opts.otherSources;
const resources = opts.resources;
const loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
return function () {
const bundlesStream = es.through(); // this stream will contain the bundled files
const resourcesStream = es.through(); // this stream will contain the resources
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }
toBundleStream(bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource);
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: 'out-build' }).pipe(resourcesStream);
const bundleInfoArray: VinylFile[] = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
base: '.',
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
}));
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
});
const otherSourcesStream = es.through();
const otherSourcesStreamArr: NodeJS.ReadWriteStream[] = [];
gulp.src(otherSources, { base: 'out-build' })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(bundledFileHeader, [data], data.relative));
}, function () {
if (!otherSourcesStreamArr.length) {
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
} else {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
}
}));
const result = es.merge(
loader(bundledFileHeader, bundleLoader),
bundlesStream,
otherSourcesStream,
resourcesStream,
bundleInfoStream
);
return result
.pipe(sourcemaps.write('./', {
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(i18n.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}))
.pipe(gulp.dest(out));
};
};
declare class FileWithCopyright extends VinylFile {
public __hasOurCopyright: boolean;
}
/**
* Wrap around uglify and allow the preserveComments function
* to have a file "context" to include our copyright only once per file.
*/
function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
const preserveComments = (f: FileWithCopyright) => {
return (node, comment: { value: string; type: string; }) => {
const text = comment.value;
const type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) {
return false;
}
const isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) {
if (f.__hasOurCopyright) {
return false;
}
f.__hasOurCopyright = true;
return true;
}
if ('comment2' === type) {
// check for /*!. Note that text doesn't contain leading /*
return (text.length > 0 && text[0] === '!') || /@preserve|license|@cc_on|copyright/i.test(text);
} else if ('comment1' === type) {
return /license|copyright/i.test(text);
}
return false;
};
};
const minify = composer(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
return stream.pipe(minify({
output: {
comments: preserveComments(<FileWithCopyright>f),
// linux tfs build agent is crashing, does this help?
max_line_len: 3200000
}
}));
}));
return es.duplex(input, output);
}
export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
return cb => {
const jsFilter = filter('**/*.js', { restore: true });
const cssFilter = filter('**/*.css', { restore: true });
pump(
gulp.src([src + '/**', '!' + src + '/**/*.map']),
jsFilter,
sourcemaps.init({ loadMaps: true }),
uglifyWithCopyrights(),
jsFilter.restore,
cssFilter,
minifyCSS({ reduceIdents: false }),
cssFilter.restore,
sourcemaps.write('./', {
sourceMappingURL,
sourceRoot: null,
includeContent: true,
addComment: true
}),
gulp.dest(src + '-min')
, (err: any) => {
if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
}
cb(err);
});
};
};

83
build/lib/reporter.js Normal file
View File

@@ -0,0 +1,83 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var es = require("event-stream");
var _ = require("underscore");
var util = require("gulp-util");
var fs = require("fs");
var path = require("path");
var allErrors = [];
var startTime = null;
var count = 0;
function onStart() {
if (count++ > 0) {
return;
}
startTime = new Date().getTime();
util.log("Starting " + util.colors.green('compilation') + "...");
}
function onEnd() {
if (--count > 0) {
return;
}
log();
}
var buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
try {
fs.mkdirSync(path.dirname(buildLogPath));
}
catch (err) {
// ignore
}
function log() {
var errors = _.flatten(allErrors);
errors.map(function (err) { return util.log(util.colors.red('Error') + ": " + err); });
var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
var messages = errors
.map(function (err) { return regex.exec(err); })
.filter(function (match) { return !!match; })
.map(function (_a) {
var path = _a[1], line = _a[2], column = _a[3], message = _a[4];
return ({ path: path, line: parseInt(line), column: parseInt(column), message: message });
});
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
}
catch (err) {
//noop
}
util.log("Finished " + util.colors.green('compilation') + " with " + errors.length + " errors after " + util.colors.magenta((new Date().getTime() - startTime) + ' ms'));
}
function createReporter() {
var errors = [];
allErrors.push(errors);
var ReportFunc = (function () {
function ReportFunc(err) {
errors.push(err);
}
ReportFunc.hasErrors = function () {
return errors.length > 0;
};
ReportFunc.end = function (emitError) {
errors.length = 0;
onStart();
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
log();
this.emit('error');
}
else {
this.emit('end');
}
});
};
return ReportFunc;
}());
return ReportFunc;
}
exports.createReporter = createReporter;
;

100
build/lib/reporter.ts Normal file
View File

@@ -0,0 +1,100 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as es from 'event-stream';
import * as _ from 'underscore';
import * as util from 'gulp-util';
import * as fs from 'fs';
import * as path from 'path';
const allErrors: Error[][] = [];
let startTime: number = null;
let count = 0;
function onStart(): void {
if (count++ > 0) {
return;
}
startTime = new Date().getTime();
util.log(`Starting ${util.colors.green('compilation')}...`);
}
function onEnd(): void {
if (--count > 0) {
return;
}
log();
}
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
try {
fs.mkdirSync(path.dirname(buildLogPath));
} catch (err) {
// ignore
}
function log(): void {
const errors = _.flatten(allErrors);
errors.map(err => util.log(`${util.colors.red('Error')}: ${err}`));
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
} catch (err) {
//noop
}
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`);
}
export interface IReporter {
(err: Error): void;
hasErrors(): boolean;
end(emitError: boolean): NodeJS.ReadWriteStream;
}
export function createReporter(): IReporter {
const errors: Error[] = [];
allErrors.push(errors);
class ReportFunc {
constructor(err: Error) {
errors.push(err);
}
static hasErrors(): boolean {
return errors.length > 0;
}
static end(emitError: boolean): NodeJS.ReadWriteStream {
errors.length = 0;
onStart();
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
log();
this.emit('error');
} else {
this.emit('end');
}
});
}
}
return <IReporter><any>ReportFunc;
};
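
A minimal sketch of the reporter contract (not in this diff): the returned value is callable to record an error, hasErrors() inspects the backlog, and end(true) is normally piped at the tail of a compile stream to log a summary and fail the stream when anything was recorded. The compile step that would feed it is assumed here.

// sketch: record errors and query them (the compile step that would feed this is assumed)
import { createReporter } from './reporter';

const reporter = createReporter();
reporter(new Error('sample: something went wrong during compilation'));
console.log(reporter.hasErrors()); // -> true
// a gulp pipeline would typically finish with .pipe(reporter.end(true))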

View File

@@ -0,0 +1,39 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var snaps;
(function (snaps) {
var fs = require('fs');
var path = require('path');
var os = require('os');
var cp = require('child_process');
var mksnapshot = path.join(__dirname, "../../node_modules/.bin/" + (process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'));
var product = require('../../product.json');
var arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
//
var loaderFilepath;
var startupBlobFilepath;
switch (process.platform) {
case 'darwin':
loaderFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Resources/app/out/vs/loader.js";
startupBlobFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin";
break;
case 'win32':
case 'linux':
loaderFilepath = "VSCode-" + process.platform + "-" + arch + "/resources/app/out/vs/loader.js";
startupBlobFilepath = "VSCode-" + process.platform + "-" + arch + "/snapshot_blob.bin";
}
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
snapshotLoader(loaderFilepath, startupBlobFilepath);
function snapshotLoader(loaderFilepath, startupBlobFilepath) {
var inputFile = fs.readFileSync(loaderFilepath);
var wrappedInputFile = "\n\t\tvar Monaco_Loader_Init;\n\t\t(function() {\n\t\t\tvar doNotInitLoader = true;\n\t\t\t" + inputFile.toString() + ";\n\t\t\tMonaco_Loader_Init = function() {\n\t\t\t\tAMDLoader.init();\n\t\t\t\tCSSLoaderPlugin.init();\n\t\t\t\tNLSLoaderPlugin.init();\n\n\t\t\t\treturn { define, require };\n\t\t\t}\n\t\t})();\n\t\t";
var wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
console.log(wrappedInputFilepath);
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, "--startup_blob", startupBlobFilepath]);
}
})(snaps || (snaps = {}));

View File

@@ -0,0 +1,63 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
namespace snaps {
const fs = require('fs');
const path = require('path');
const os = require('os');
const cp = require('child_process');
const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`);
const product = require('../../product.json');
const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
//
let loaderFilepath: string;
let startupBlobFilepath: string;
switch (process.platform) {
case 'darwin':
loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`;
break;
case 'win32':
case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
}
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
snapshotLoader(loaderFilepath, startupBlobFilepath);
function snapshotLoader(loaderFilepath: string, startupBlobFilepath: string): void {
const inputFile = fs.readFileSync(loaderFilepath);
const wrappedInputFile = `
var Monaco_Loader_Init;
(function() {
var doNotInitLoader = true;
${inputFile.toString()};
Monaco_Loader_Init = function() {
AMDLoader.init();
CSSLoaderPlugin.init();
NLSLoaderPlugin.init();
return { define, require };
}
})();
`;
const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
console.log(wrappedInputFilepath);
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]);
}
}
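
This script executes on load: it wraps vs/loader.js so that mksnapshot can pre-parse it into a V8 startup blob. A hedged invocation sketch follows; the compiled script path is an assumption, since the header line naming this file is missing from the diff, and the architecture is read from the --arch flag.

// sketch: run the compiled snapshot script for a 64-bit build (the script path is hypothetical)
import { execFileSync } from 'child_process';

execFileSync(process.execPath, ['build/lib/snapshotLoader.js', '--arch=x64'], { stdio: 'inherit' });
// wraps vs/loader.js in the OS temp dir, then calls mksnapshot to write snapshot_blob.bin
// next to the packaged application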

View File

@@ -0,0 +1,40 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var assert = require("assert");
var i18n = require("../i18n");
suite('XLF Parser Tests', function () {
var sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>';
var sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>';
var originalFilePath = 'vs/base/common/keybinding';
var keys = ['key1', 'key2'];
var messages = ['Key #1', 'Key #2 &'];
var translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', function () {
var xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages);
var xlfString = xlf.toString();
assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
});
test('XLF to keys & messages conversion', function () {
i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) {
assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
});
});
test('JSON file source path to Transifex resource match', function () {
var editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
var platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);
assert.deepEqual(i18n.getResource('vs/base/common/errorMessage'), base);
assert.deepEqual(i18n.getResource('vs/code/electron-main/window'), code);
assert.deepEqual(i18n.getResource('vs/workbench/parts/html/browser/webview'), workbenchParts);
assert.deepEqual(i18n.getResource('vs/workbench/services/files/node/fileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench);
});
});

View File

@@ -0,0 +1,54 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import assert = require('assert');
import i18n = require('../i18n');
suite('XLF Parser Tests', () => {
const sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>';
const sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>';
const originalFilePath = 'vs/base/common/keybinding';
const keys = ['key1', 'key2'];
const messages = ['Key #1', 'Key #2 &'];
const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', () => {
let xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages);
const xlfString = xlf.toString();
assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
});
test('XLF to keys & messages conversion', () => {
i18n.XLF.parse(sampleTranslatedXlf).then(function(resolvedFiles) {
assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
});
});
test('JSON file source path to Transifex resource match', () => {
const editorProject: string = 'vscode-editor',
workbenchProject: string = 'vscode-workbench';
const platform: i18n.Resource = { name: 'vs/platform', project: editorProject },
editorContrib = { name: 'vs/editor/contrib', project: editorProject },
editor = { name: 'vs/editor', project: editorProject },
base = { name: 'vs/base', project: editorProject },
code = { name: 'vs/code', project: workbenchProject },
workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject },
workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject },
workbench = { name: 'vs/workbench', project: workbenchProject};
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);
assert.deepEqual(i18n.getResource('vs/base/common/errorMessage'), base);
assert.deepEqual(i18n.getResource('vs/code/electron-main/window'), code);
assert.deepEqual(i18n.getResource('vs/workbench/parts/html/browser/webview'), workbenchParts);
assert.deepEqual(i18n.getResource('vs/workbench/services/files/node/fileService'), workbenchServices);
assert.deepEqual(i18n.getResource('vs/workbench/browser/parts/panel/panelActions'), workbench);
});
});

View File

@@ -0,0 +1,59 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var allowed = this.getOptions().ruleArguments[0];
return this.applyWithWalker(new AsyncRuleWalker(sourceFile, this.getOptions(), allowed));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var AsyncRuleWalker = (function (_super) {
__extends(AsyncRuleWalker, _super);
function AsyncRuleWalker(file, opts, allowed) {
var _this = _super.call(this, file, opts) || this;
_this.allowed = allowed;
return _this;
}
AsyncRuleWalker.prototype.visitMethodDeclaration = function (node) {
this.visitFunctionLikeDeclaration(node);
};
AsyncRuleWalker.prototype.visitFunctionDeclaration = function (node) {
this.visitFunctionLikeDeclaration(node);
};
AsyncRuleWalker.prototype.visitFunctionLikeDeclaration = function (node) {
var _this = this;
var flags = ts.getCombinedModifierFlags(node);
if (!(flags & ts.ModifierFlags.Async)) {
return;
}
var path = node.getSourceFile().path;
var pathParts = path.split(/\\|\//);
if (pathParts.some(function (part) { return _this.allowed.some(function (allowed) { return part === allowed; }); })) {
return;
}
var message = "You are not allowed to use async function in this layer. Allowed layers are: [" + this.allowed + "]";
this.addFailureAtNode(node, message);
};
return AsyncRuleWalker;
}(Lint.RuleWalker));

View File

@@ -0,0 +1,47 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const allowed = this.getOptions().ruleArguments[0] as string[];
return this.applyWithWalker(new AsyncRuleWalker(sourceFile, this.getOptions(), allowed));
}
}
class AsyncRuleWalker extends Lint.RuleWalker {
constructor(file: ts.SourceFile, opts: Lint.IOptions, private allowed: string[]) {
super(file, opts);
}
protected visitMethodDeclaration(node: ts.MethodDeclaration): void {
this.visitFunctionLikeDeclaration(node);
}
protected visitFunctionDeclaration(node: ts.FunctionDeclaration): void {
this.visitFunctionLikeDeclaration(node);
}
private visitFunctionLikeDeclaration(node: ts.FunctionLikeDeclaration) {
const flags = ts.getCombinedModifierFlags(node);
if (!(flags & ts.ModifierFlags.Async)) {
return;
}
const path = (node.getSourceFile() as any).path;
const pathParts = path.split(/\\|\//);
if (pathParts.some(part => this.allowed.some(allowed => part === allowed))) {
return;
}
const message = `You are not allowed to use async functions in this layer. Allowed layers are: [${this.allowed}]`;
this.addFailureAtNode(node, message);
}
}
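The rule takes a single argument: the list of layer (directory) names in which async functions are permitted. A hypothetical configuration fragment, written as a TypeScript literal in place of tslint.json (the rule name follows tslint's kebab-case convention for noAsyncRule.ts; the layer names are placeholders):
const tslintRules = {
	rulesDirectory: ['build/lib/tslint'],
	rules: {
		// ruleArguments[0]: directory names in which async functions are allowed
		'no-async': [true, ['node', 'electron-main', 'electron-browser']]
	}
};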

View File

@@ -0,0 +1,50 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var path_1 = require("path");
var Lint = require("tslint");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var ImportPatterns = (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts) {
var _this = _super.call(this, file, opts) || this;
_this.imports = Object.create(null);
return _this;
}
ImportPatterns.prototype.visitImportDeclaration = function (node) {
var path = node.moduleSpecifier.getText();
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
if (this.imports[path]) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Duplicate imports for '" + path + "'."));
}
this.imports[path] = true;
};
return ImportPatterns;
}(Lint.RuleWalker));

View File

@@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import { join, dirname } from 'path';
import * as Lint from 'tslint';
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions()));
}
}
class ImportPatterns extends Lint.RuleWalker {
private imports: { [path: string]: boolean; } = Object.create(null);
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
let path = node.moduleSpecifier.getText();
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = join(dirname(node.getSourceFile().fileName), path);
}
if (this.imports[path]) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Duplicate imports for '${path}'.`));
}
this.imports[path] = true;
}
}
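For illustration only, the kind of source the walker above reports: two import declarations that resolve to the same module specifier.
// Illustrative input, not from this commit:
import { join } from 'path';
import { dirname } from 'path'; // reported: "Duplicate imports for 'path'."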

View File

@@ -0,0 +1,90 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
var minimatch = require("minimatch");
var path_1 = require("path");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var configs = this.getOptions().ruleArguments;
for (var _i = 0, configs_1 = configs; _i < configs_1.length; _i++) {
var config = configs_1[_i];
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config));
}
}
return [];
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var ImportPatterns = (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts, _config) {
var _this = _super.call(this, file, opts) || this;
_this._config = _config;
return _this;
}
ImportPatterns.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
};
ImportPatterns.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
};
ImportPatterns.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
};
ImportPatterns.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(this.getSourceFile().fileName, path);
}
var restrictions;
if (typeof this._config.restrictions === 'string') {
restrictions = [this._config.restrictions];
}
else {
restrictions = this._config.restrictions;
}
var matched = false;
for (var _i = 0, restrictions_1 = restrictions; _i < restrictions_1.length; _i++) {
var pattern = restrictions_1[_i];
if (minimatch(path, pattern)) {
matched = true;
break;
}
}
if (!matched) {
// None of the restrictions matched
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Imports violates '" + restrictions.join(' or ') + "' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization"));
}
};
return ImportPatterns;
}(Lint.RuleWalker));

View File

@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as minimatch from 'minimatch';
import { join } from 'path';
interface ImportPatternsConfig {
target: string;
restrictions: string | string[];
}
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const configs = <ImportPatternsConfig[]>this.getOptions().ruleArguments;
for (const config of configs) {
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config));
}
}
return [];
}
}
class ImportPatterns extends Lint.RuleWalker {
constructor(file: ts.SourceFile, opts: Lint.IOptions, private _config: ImportPatternsConfig) {
super(file, opts);
}
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
this._validateImport(node.moduleSpecifier.getText(), node);
}
protected visitCallExpression(node: ts.CallExpression): void {
super.visitCallExpression(node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
this._validateImport(path.getText(), node);
}
}
private _validateImport(path: string, node: ts.Node): void {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = join(this.getSourceFile().fileName, path);
}
let restrictions: string[];
if (typeof this._config.restrictions === 'string') {
restrictions = [this._config.restrictions];
} else {
restrictions = this._config.restrictions;
}
let matched = false;
for (const pattern of restrictions) {
if (minimatch(path, pattern)) {
matched = true;
break;
}
}
if (!matched) {
// None of the restrictions matched
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Import violates '${restrictions.join(' or ')}' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
}
}
}
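Each entry of ruleArguments pairs a target glob with the module patterns its files may import. A hypothetical ruleArguments value for the import-patterns rule, written as a TypeScript literal (globs are placeholders, not from this commit):
const importPatternsArguments = [
	{
		// files matching `target` may only import modules matching `restrictions`
		target: '**/vs/base/common/**',
		restrictions: ['vs/base/common/**', 'assert', 'path']
	}
];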

View File

@@ -0,0 +1,101 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
var path_1 = require("path");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var parts = path_1.dirname(sourceFile.fileName).split(/\\|\//);
var ruleArgs = this.getOptions().ruleArguments[0];
var config;
for (var i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set()
};
Object.keys(ruleArgs).forEach(function (key) {
if (!config.allowed.has(key)) {
config.disallowed.add(key);
}
});
break;
}
}
if (!config) {
return [];
}
return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var LayeringRule = (function (_super) {
__extends(LayeringRule, _super);
function LayeringRule(file, config, opts) {
var _this = _super.call(this, file, opts) || this;
_this._config = config;
return _this;
}
LayeringRule.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
};
LayeringRule.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
};
LayeringRule.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
};
LayeringRule.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
var parts = path_1.dirname(path).split(/\\|\//);
for (var i = parts.length - 1; i >= 0; i--) {
var part = parts[i];
if (this._config.allowed.has(part)) {
// GOOD - same layer
return;
}
if (this._config.disallowed.has(part)) {
// BAD - wrong layer
var message = "Bad layering. You are not allowed to access '" + part + "' from here, allowed layers are: [" + LayeringRule._print(this._config.allowed) + "]";
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message));
return;
}
}
};
LayeringRule._print = function (set) {
var r = [];
set.forEach(function (e) { return r.push(e); });
return r.join(', ');
};
return LayeringRule;
}(Lint.RuleWalker));

View File

@@ -0,0 +1,105 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import { join, dirname } from 'path';
interface Config {
allowed: Set<string>;
disallowed: Set<string>;
}
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const parts = dirname(sourceFile.fileName).split(/\\|\//);
let ruleArgs = this.getOptions().ruleArguments[0];
let config: Config;
for (let i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
allowed: new Set<string>(<string[]>ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set<string>()
};
Object.keys(ruleArgs).forEach(key => {
if (!config.allowed.has(key)) {
config.disallowed.add(key);
}
});
break;
}
}
if (!config) {
return [];
}
return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions()));
}
}
class LayeringRule extends Lint.RuleWalker {
private _config: Config;
constructor(file: ts.SourceFile, config: Config, opts: Lint.IOptions) {
super(file, opts);
this._config = config;
}
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
this._validateImport(node.moduleSpecifier.getText(), node);
}
protected visitCallExpression(node: ts.CallExpression): void {
super.visitCallExpression(node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
this._validateImport(path.getText(), node);
}
}
private _validateImport(path: string, node: ts.Node): void {
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = join(dirname(node.getSourceFile().fileName), path);
}
const parts = dirname(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i];
if (this._config.allowed.has(part)) {
// GOOD - same layer
return;
}
if (this._config.disallowed.has(part)) {
// BAD - wrong layer
const message = `Bad layering. You are not allowed to access '${part}' from here, allowed layers are: [${LayeringRule._print(this._config.allowed)}]`;
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message));
return;
}
}
}
static _print(set: Set<string>): string {
let r: string[] = [];
set.forEach(e => r.push(e));
return r.join(', ');
}
}
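The first rule argument maps each layer name to the additional layers it may import from; LayeringRule adds the key itself to the allowed set and treats every other key as disallowed. A hypothetical shape for that argument (layer names are illustrative):
const layeringArgument = {
	common: [],
	node: ['common'],
	browser: ['common'],
	'electron-main': ['common', 'node'],
	'electron-browser': ['common', 'browser', 'node']
};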

View File

@@ -0,0 +1,182 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var ts = require("typescript");
var Lint = require("tslint");
/**
* Implementation of the no-unexternalized-strings rule.
*/
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
function isStringLiteral(node) {
return node && node.kind === ts.SyntaxKind.StringLiteral;
}
function isObjectLiteral(node) {
return node && node.kind === ts.SyntaxKind.ObjectLiteralExpression;
}
function isPropertyAssignment(node) {
return node && node.kind === ts.SyntaxKind.PropertyAssignment;
}
var NoUnexternalizedStringsRuleWalker = (function (_super) {
__extends(NoUnexternalizedStringsRuleWalker, _super);
function NoUnexternalizedStringsRuleWalker(file, opts) {
var _this = _super.call(this, file, opts) || this;
_this.signatures = Object.create(null);
_this.ignores = Object.create(null);
_this.messageIndex = undefined;
_this.keyIndex = undefined;
_this.usedKeys = Object.create(null);
var options = _this.getOptions();
var first = options && options.length > 0 ? options[0] : null;
if (first) {
if (Array.isArray(first.signatures)) {
first.signatures.forEach(function (signature) { return _this.signatures[signature] = true; });
}
if (Array.isArray(first.ignores)) {
first.ignores.forEach(function (ignore) { return _this.ignores[ignore] = true; });
}
if (typeof first.messageIndex !== 'undefined') {
_this.messageIndex = first.messageIndex;
}
if (typeof first.keyIndex !== 'undefined') {
_this.keyIndex = first.keyIndex;
}
}
return _this;
}
NoUnexternalizedStringsRuleWalker.prototype.visitSourceFile = function (node) {
var _this = this;
_super.prototype.visitSourceFile.call(this, node);
Object.keys(this.usedKeys).forEach(function (key) {
var occurrences = _this.usedKeys[key];
if (occurrences.length > 1) {
occurrences.forEach(function (occurrence) {
_this.addFailure((_this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), "Duplicate key " + occurrence.key.getText() + " with different message value.")));
});
}
});
};
NoUnexternalizedStringsRuleWalker.prototype.visitStringLiteral = function (node) {
this.checkStringLiteral(node);
_super.prototype.visitStringLiteral.call(this, node);
};
NoUnexternalizedStringsRuleWalker.prototype.checkStringLiteral = function (node) {
var text = node.getText();
var doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
var info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) {
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
this.createReplacement(node.getStart(), 1, '\''),
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
]));
return;
}
var callInfo = info ? info.callInfo : null;
var functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) {
return;
}
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
var s = node.getText();
var replacement = new Lint.Replacement(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")");
var fix = new Lint.Fix('Unexternalized string', [replacement]);
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
return;
}
// We have a single quoted string outside a localize function name.
if (!doubleQuoted && !this.signatures[functionName]) {
return;
}
// We have a string that is a direct argument into the localize call.
var keyArg = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex]
: null;
if (keyArg) {
if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
else if (isObjectLiteral(keyArg)) {
for (var i = 0; i < keyArg.properties.length; i++) {
var property = keyArg.properties[i];
if (isPropertyAssignment(property)) {
var name_1 = property.name.getText();
if (name_1 === 'key') {
var initializer = property.initializer;
if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
break;
}
}
}
}
}
var messageArg = callInfo.argIndex === this.messageIndex
? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return;
}
};
NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
var text = keyNode.getText();
var occurrences = this.usedKeys[text];
if (!occurrences) {
occurrences = [];
this.usedKeys[text] = occurrences;
}
if (messageNode) {
if (occurrences.some(function (pair) { return pair.message ? pair.message.getText() === messageNode.getText() : false; })) {
return;
}
}
occurrences.push({ key: keyNode, message: messageNode });
};
NoUnexternalizedStringsRuleWalker.prototype.findDescribingParent = function (node) {
var parent;
while ((parent = node.parent)) {
var kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) {
var callExpression = parent;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } };
}
else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
return { isImport: true };
}
else if (kind === ts.SyntaxKind.VariableDeclaration || kind === ts.SyntaxKind.FunctionDeclaration || kind === ts.SyntaxKind.PropertyDeclaration
|| kind === ts.SyntaxKind.MethodDeclaration || kind === ts.SyntaxKind.VariableDeclarationList || kind === ts.SyntaxKind.InterfaceDeclaration
|| kind === ts.SyntaxKind.ClassDeclaration || kind === ts.SyntaxKind.EnumDeclaration || kind === ts.SyntaxKind.ModuleDeclaration
|| kind === ts.SyntaxKind.TypeAliasDeclaration || kind === ts.SyntaxKind.SourceFile) {
return null;
}
node = parent;
}
};
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
return NoUnexternalizedStringsRuleWalker;
}(Lint.RuleWalker));

View File

@@ -0,0 +1,201 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
/**
* Implementation of the no-unexternalized-strings rule.
*/
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
}
}
interface Map<V> {
[key: string]: V;
}
interface UnexternalizedStringsOptions {
signatures?: string[];
messageIndex?: number;
keyIndex?: number;
ignores?: string[];
}
function isStringLiteral(node: ts.Node): node is ts.StringLiteral {
return node && node.kind === ts.SyntaxKind.StringLiteral;
}
function isObjectLiteral(node: ts.Node): node is ts.ObjectLiteralExpression {
return node && node.kind === ts.SyntaxKind.ObjectLiteralExpression;
}
function isPropertyAssignment(node: ts.Node): node is ts.PropertyAssignment {
return node && node.kind === ts.SyntaxKind.PropertyAssignment;
}
interface KeyMessagePair {
key: ts.StringLiteral;
message: ts.Node;
}
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private static ImportFailureMessage = 'Do not use double quotes for imports.';
private static DOUBLE_QUOTE: string = '"';
private signatures: Map<boolean>;
private messageIndex: number;
private keyIndex: number;
private ignores: Map<boolean>;
private usedKeys: Map<KeyMessagePair[]>;
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
this.signatures = Object.create(null);
this.ignores = Object.create(null);
this.messageIndex = undefined;
this.keyIndex = undefined;
this.usedKeys = Object.create(null);
let options: any[] = this.getOptions();
let first: UnexternalizedStringsOptions = options && options.length > 0 ? options[0] : null;
if (first) {
if (Array.isArray(first.signatures)) {
first.signatures.forEach((signature: string) => this.signatures[signature] = true);
}
if (Array.isArray(first.ignores)) {
first.ignores.forEach((ignore: string) => this.ignores[ignore] = true);
}
if (typeof first.messageIndex !== 'undefined') {
this.messageIndex = first.messageIndex;
}
if (typeof first.keyIndex !== 'undefined') {
this.keyIndex = first.keyIndex;
}
}
}
protected visitSourceFile(node: ts.SourceFile): void {
super.visitSourceFile(node);
Object.keys(this.usedKeys).forEach(key => {
let occurrences = this.usedKeys[key];
if (occurrences.length > 1) {
occurrences.forEach(occurrence => {
this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`)));
});
}
});
}
protected visitStringLiteral(node: ts.StringLiteral): void {
this.checkStringLiteral(node);
super.visitStringLiteral(node);
}
private checkStringLiteral(node: ts.StringLiteral): void {
let text = node.getText();
let doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
let info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) {
this.addFailureAtNode(
node,
NoUnexternalizedStringsRuleWalker.ImportFailureMessage,
new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
this.createReplacement(node.getStart(), 1, '\''),
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
])
);
return;
}
let callInfo = info ? info.callInfo : null;
let functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) {
return;
}
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText();
const replacement = new Lint.Replacement(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`);
const fix = new Lint.Fix('Unexternalized string', [replacement]);
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix));
return;
}
// We have a single quoted string outside a localize function name.
if (!doubleQuoted && !this.signatures[functionName]) {
return;
}
// We have a string that is a direct argument into the localize call.
let keyArg: ts.Expression = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex]
: null;
if (keyArg) {
if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} else if (isObjectLiteral(keyArg)) {
for (let i = 0; i < keyArg.properties.length; i++) {
let property = keyArg.properties[i];
if (isPropertyAssignment(property)) {
let name = property.name.getText();
if (name === 'key') {
let initializer = property.initializer;
if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
break;
}
}
}
}
}
let messageArg: ts.Expression = callInfo.argIndex === this.messageIndex
? callInfo.callExpression.arguments[this.messageIndex]
: null;
if (messageArg && messageArg !== node) {
this.addFailure(this.createFailure(
messageArg.getStart(), messageArg.getWidth(),
`Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`));
return;
}
}
private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) {
let text = keyNode.getText();
let occurrences: KeyMessagePair[] = this.usedKeys[text];
if (!occurrences) {
occurrences = [];
this.usedKeys[text] = occurrences;
}
if (messageNode) {
if (occurrences.some(pair => pair.message ? pair.message.getText() === messageNode.getText() : false)) {
return;
}
}
occurrences.push({ key: keyNode, message: messageNode });
}
private findDescribingParent(node: ts.Node): { callInfo?: { callExpression: ts.CallExpression, argIndex: number }, isImport?: boolean; } {
let parent: ts.Node;
while ((parent = node.parent)) {
let kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) {
let callExpression = parent as ts.CallExpression;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(<any>node) } };
} else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
return { isImport: true };
} else if (kind === ts.SyntaxKind.VariableDeclaration || kind === ts.SyntaxKind.FunctionDeclaration || kind === ts.SyntaxKind.PropertyDeclaration
|| kind === ts.SyntaxKind.MethodDeclaration || kind === ts.SyntaxKind.VariableDeclarationList || kind === ts.SyntaxKind.InterfaceDeclaration
|| kind === ts.SyntaxKind.ClassDeclaration || kind === ts.SyntaxKind.EnumDeclaration || kind === ts.SyntaxKind.ModuleDeclaration
|| kind === ts.SyntaxKind.TypeAliasDeclaration || kind === ts.SyntaxKind.SourceFile) {
return null;
}
node = parent;
}
}
}
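The walker is driven by an options object matching UnexternalizedStringsOptions above. A hypothetical instance (signature names and indices are placeholders, chosen only to show the shape):
const unexternalizedStringsOptions = {
	signatures: ['localize', 'nls.localize'], // calls whose string arguments are checked
	keyIndex: 0,                              // argument position of the NLS key
	messageIndex: 1,                          // argument position of the message
	ignores: ['require']                      // call names exempt from the check
};
// With such options, nls.localize('some.key', "Some message") passes, while a bare
// double-quoted literal outside a listed signature is reported as unexternalized.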

View File

@@ -0,0 +1,79 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
var Lint = require("tslint");
var fs = require("fs");
var Rule = (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new TranslationRemindRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
var TranslationRemindRuleWalker = (function (_super) {
__extends(TranslationRemindRuleWalker, _super);
function TranslationRemindRuleWalker(file, opts) {
return _super.call(this, file, opts) || this;
}
TranslationRemindRuleWalker.prototype.visitImportDeclaration = function (node) {
var declaration = node.moduleSpecifier.getText();
if (declaration !== "'" + TranslationRemindRuleWalker.NLS_MODULE + "'") {
return;
}
this.visitImportLikeDeclaration(node);
};
TranslationRemindRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
var reference = node.moduleReference.getText();
if (reference !== "require('" + TranslationRemindRuleWalker.NLS_MODULE + "')") {
return;
}
this.visitImportLikeDeclaration(node);
};
TranslationRemindRuleWalker.prototype.visitImportLikeDeclaration = function (node) {
var currentFile = node.getSourceFile().fileName;
var matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
var matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/);
if (!matchService && !matchPart) {
return;
}
var resource = matchService ? matchService[0] : matchPart[0];
var resourceDefined = false;
var json;
try {
json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
}
catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
return;
}
var workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach(function (existingResource) {
if (existingResource.name === resource) {
resourceDefined = true;
return;
}
});
if (!resourceDefined) {
this.addFailureAtNode(node, "Please add '" + resource + "' to ./builds/lib/i18n.resources.json file to use translations here.");
}
};
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';
return TranslationRemindRuleWalker;
}(Lint.RuleWalker));

View File

@@ -0,0 +1,73 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
import * as fs from 'fs';
export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new TranslationRemindRuleWalker(sourceFile, this.getOptions()));
}
}
class TranslationRemindRuleWalker extends Lint.RuleWalker {
private static NLS_MODULE: string = 'vs/nls';
constructor(file: ts.SourceFile, opts: Lint.IOptions) {
super(file, opts);
}
protected visitImportDeclaration(node: ts.ImportDeclaration): void {
const declaration = node.moduleSpecifier.getText();
if (declaration !== `'${TranslationRemindRuleWalker.NLS_MODULE}'`) {
return;
}
this.visitImportLikeDeclaration(node);
}
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
const reference = node.moduleReference.getText();
if (reference !== `require('${TranslationRemindRuleWalker.NLS_MODULE}')`) {
return;
}
this.visitImportLikeDeclaration(node);
}
private visitImportLikeDeclaration(node: ts.ImportDeclaration | ts.ImportEqualsDeclaration) {
const currentFile = node.getSourceFile().fileName;
const matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
const matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/);
if (!matchService && !matchPart) {
return;
}
const resource = matchService ? matchService[0] : matchPart[0];
let resourceDefined = false;
let json;
try {
json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
} catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
return;
}
const workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach(existingResource => {
if (existingResource.name === resource) {
resourceDefined = true;
return;
}
});
if (!resourceDefined) {
this.addFailureAtNode(node, `Please add '${resource}' to ./build/lib/i18n.resources.json to use translations here.`);
}
}
}
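The rule compares the importing file's workbench part or service against the `workbench` array of build/lib/i18n.resources.json. An assumed sketch of that file's shape, written as a TypeScript literal (entries are illustrative):
const i18nResources = {
	workbench: [
		{ name: 'vs/workbench/parts/html', project: 'vscode-workbench' },
		{ name: 'vs/workbench/services/files', project: 'vscode-workbench' }
	]
};
// A workbench part or service that imports vs/nls but has no entry here triggers
// the failure message above.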

View File

@@ -0,0 +1,10 @@
// ATTENTION - THIS DIRECTORY CONTAINS THIRD PARTY OPEN SOURCE MATERIALS:
// All OSS in this folder is development time only
[{
"name": "definitelytyped",
"repositoryURL": "https://github.com/DefinitelyTyped/DefinitelyTyped",
"license": "MIT",
"isDev": true
}
]

361
build/lib/typings/Q.d.ts vendored Normal file
View File

@@ -0,0 +1,361 @@
// Type definitions for Q
// Project: https://github.com/kriskowal/q
// Definitions by: Barrie Nemetchek <https://github.com/bnemetchek>, Andrew Gaspar <https://github.com/AndrewGaspar/>, John Reilly <https://github.com/johnnyreilly>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/**
* If value is a Q promise, returns the promise.
* If value is a promise from another library it is coerced into a Q promise (where possible).
*/
declare function Q<T>(promise: Q.IPromise<T>): Q.Promise<T>;
/**
* If value is not a promise, returns a promise that is fulfilled with value.
*/
declare function Q<T>(value: T): Q.Promise<T>;
/**
* Calling with nothing at all creates a void promise
*/
declare function Q(): Q.Promise<void>;
declare namespace Q {
type IWhenable<T> = IPromise<T> | T;
interface IPromise<T> {
then<U>(onFulfill?: (value: T) => IWhenable<U>, onReject?: (error: any) => IWhenable<U>): IPromise<U>;
}
interface Deferred<T> {
promise: Promise<T>;
resolve(value?: IWhenable<T>): void;
reject(reason: any): void;
notify(value: any): void;
makeNodeResolver(): (reason: any, value: T) => void;
}
interface Promise<T> {
/**
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.
* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
*/
fin(finallyCallback: () => any): Promise<T>;
/**
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.
* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
*/
finally(finallyCallback: () => any): Promise<T>;
/**
* The then method from the Promises/A+ specification, with an additional progress handler.
*/
then<U>(onFulfill?: (value: T) => IWhenable<U>, onReject?: (error: any) => IWhenable<U>, onProgress?: Function): Promise<U>;
/**
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
*
* This is especially useful in conjunction with all
*/
spread<U>(onFulfill: (...args: any[]) => IWhenable<U>, onReject?: (reason: any) => IWhenable<U>): Promise<U>;
fail<U>(onRejected: (reason: any) => IWhenable<U>): Promise<U>;
/**
* A sugar method, equivalent to promise.then(undefined, onRejected).
*/
catch<U>(onRejected: (reason: any) => IWhenable<U>): Promise<U>;
/**
* A sugar method, equivalent to promise.then(undefined, undefined, onProgress).
*/
progress(onProgress: (progress: any) => any): Promise<T>;
/**
* Much like then, but with different behavior around unhandled rejection. If there is an unhandled rejection, either because promise is rejected and no onRejected callback was provided, or because onFulfilled or onRejected threw an error or returned a rejected promise, the resulting rejection reason is thrown as an exception in a future turn of the event loop.
*
* This method should be used to terminate chains of promises that will not be passed elsewhere. Since exceptions thrown in then callbacks are consumed and transformed into rejections, exceptions at the end of the chain are easy to accidentally, silently ignore. By arranging for the exception to be thrown in a future turn of the event loop, so that it won't be caught, it causes an onerror event on the browser window, or an uncaughtException event on Node.js's process object.
*
* Exceptions thrown by done will have long stack traces, if Q.longStackSupport is set to true. If Q.onerror is set, exceptions will be delivered there instead of thrown in a future turn.
*
* The Golden Rule of done vs. then usage is: either return your promise to someone else, or if the chain ends with you, call done to terminate it.
*/
done(onFulfilled?: (value: T) => any, onRejected?: (reason: any) => any, onProgress?: (progress: any) => any): void;
/**
* If callback is a function, assumes it's a Node.js-style callback, and calls it as either callback(rejectionReason) when/if promise becomes rejected, or as callback(null, fulfillmentValue) when/if promise becomes fulfilled. If callback is not a function, simply returns promise.
*/
nodeify(callback: (reason: any, value: any) => void): Promise<T>;
/**
* Returns a promise to get the named property of an object. Essentially equivalent to
*
* promise.then(function (o) {
* return o[propertyName];
* });
*/
get<U>(propertyName: String): Promise<U>;
set<U>(propertyName: String, value: any): Promise<U>;
delete<U>(propertyName: String): Promise<U>;
/**
* Returns a promise for the result of calling the named method of an object with the given array of arguments. The object itself is this in the function, just like a synchronous method call. Essentially equivalent to
*
* promise.then(function (o) {
* return o[methodName].apply(o, args);
* });
*/
post<U>(methodName: String, args: any[]): Promise<U>;
/**
* Returns a promise for the result of calling the named method of an object with the given variadic arguments. The object itself is this in the function, just like a synchronous method call.
*/
invoke<U>(methodName: String, ...args: any[]): Promise<U>;
fapply<U>(args: any[]): Promise<U>;
fcall<U>(...args: any[]): Promise<U>;
/**
* Returns a promise for an array of the property names of an object. Essentially equivalent to
*
* promise.then(function (o) {
* return Object.keys(o);
* });
*/
keys(): Promise<string[]>;
/**
* A sugar method, equivalent to promise.then(function () { return value; }).
*/
thenResolve<U>(value: U): Promise<U>;
/**
* A sugar method, equivalent to promise.then(function () { throw reason; }).
*/
thenReject(reason: any): Promise<T>;
/**
* Attaches a handler that will observe the value of the promise when it becomes fulfilled, returning a promise for that same value, perhaps deferred but not replaced by the promise returned by the onFulfilled handler.
*/
tap(onFulfilled: (value: T) => any): Promise<T>;
timeout(ms: number, message?: string): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
delay(ms: number): Promise<T>;
/**
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
*/
isFulfilled(): boolean;
/**
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
*/
isRejected(): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
isPending(): boolean;
valueOf(): any;
/**
* Returns a "state snapshot" object, which will be in one of three forms:
*
* - { state: "pending" }
* - { state: "fulfilled", value: <fulfllment value> }
* - { state: "rejected", reason: <rejection reason> }
*/
inspect(): PromiseState<T>;
}
interface PromiseState<T> {
/**
* "fulfilled", "rejected", "pending"
*/
state: string;
value?: T;
reason?: any;
}
// If no value provided, returned promise will be of void type
export function when(): Promise<void>;
// if no fulfill, reject, or progress provided, returned promise will be of same type
export function when<T>(value: IWhenable<T>): Promise<T>;
// If a non-promise value is provided, it will not reject or progress
export function when<T, U>(value: IWhenable<T>, onFulfilled: (val: T) => IWhenable<U>, onRejected?: (reason: any) => IWhenable<U>, onProgress?: (progress: any) => any): Promise<U>;
/**
* Currently "impossible" (and I use the term loosely) to implement due to TypeScript limitations as it is now.
* See: https://github.com/Microsoft/TypeScript/issues/1784 for discussion on it.
*/
// export function try(method: Function, ...args: any[]): Promise<any>;
export function fbind<T>(method: (...args: any[]) => IWhenable<T>, ...args: any[]): (...args: any[]) => Promise<T>;
export function fcall<T>(method: (...args: any[]) => T, ...args: any[]): Promise<T>;
export function send<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
export function invoke<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
export function mcall<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
export function denodeify<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
export function nbind<T>(nodeFunction: Function, thisArg: any, ...args: any[]): (...args: any[]) => Promise<T>;
export function nfbind<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
export function nfcall<T>(nodeFunction: Function, ...args: any[]): Promise<T>;
export function nfapply<T>(nodeFunction: Function, args: any[]): Promise<T>;
export function ninvoke<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
export function npost<T>(nodeModule: any, functionName: string, args: any[]): Promise<T>;
export function nsend<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
export function nmcall<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C, D, E, F>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>, IWhenable<E>, IWhenable<F>]>): Promise<[A, B, C, D, E, F]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C, D, E>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>, IWhenable<E>]>): Promise<[A, B, C, D, E]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C, D>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>]>): Promise<[A, B, C, D]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>]>): Promise<[A, B, C]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B>(promises: IWhenable<[IWhenable<A>, IWhenable<B>]>): Promise<[A, B]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<T>(promises: IWhenable<IWhenable<T>[]>): Promise<T[]>;
/**
* Returns a promise for the first of an array of promises to become settled.
*/
export function race<T>(promises: IWhenable<T>[]): Promise<T>;
/**
* Returns a promise that is fulfilled with an array of promise state snapshots, but only after all the original promises have settled, i.e. become either fulfilled or rejected.
*/
export function allSettled<T>(promises: IWhenable<IWhenable<T>[]>): Promise<PromiseState<T>[]>;
export function allResolved<T>(promises: IWhenable<IWhenable<T>[]>): Promise<Promise<T>[]>;
/**
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
* This is especially useful in conjunction with all.
*/
export function spread<T, U>(promises: IWhenable<T>[], onFulfilled: (...args: T[]) => IWhenable<U>, onRejected?: (reason: any) => IWhenable<U>): Promise<U>;
/**
* Returns a promise that will have the same result as promise, except that if promise is not fulfilled or rejected before ms milliseconds, the returned promise will be rejected with an Error with the given message. If message is not supplied, the message will be "Timed out after " + ms + " ms".
*/
export function timeout<T>(promise: Promise<T>, ms: number, message?: string): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
export function delay<T>(promise: Promise<T>, ms: number): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
export function delay<T>(value: T, ms: number): Promise<T>;
/**
* Returns a promise that will be fulfilled with undefined after at least ms milliseconds have passed.
*/
export function delay(ms: number): Promise<void>;
/**
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
*/
export function isFulfilled(promise: Promise<any>): boolean;
/**
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
*/
export function isRejected(promise: Promise<any>): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
export function isPending(promise: Promise<any>): boolean;
/**
* Returns a "deferred" object with a:
* promise property
* resolve(value) method
* reject(reason) method
* notify(value) method
* makeNodeResolver() method
*/
export function defer<T>(): Deferred<T>;
/**
* Returns a promise that is rejected with reason.
*/
export function reject<T>(reason?: any): Promise<T>;
export function Promise<T>(resolver: (resolve: (val: IWhenable<T>) => void, reject: (reason: any) => void, notify: (progress: any) => void) => void): Promise<T>;
/**
* Creates a new version of func that accepts any combination of promise and non-promise values, converting them to their fulfillment values before calling the original func. The returned version also always returns a promise: if func does a return or throw, then Q.promised(func) will return a fulfilled or rejected promise, respectively.
*
* This can be useful for creating functions that accept either promises or non-promise values, and for ensuring that the function always returns a promise even in the face of unintentional thrown exceptions.
*/
export function promised<T>(callback: (...args: any[]) => T): (...args: any[]) => Promise<T>;
/**
* Returns whether the given value is a Q promise.
*/
export function isPromise(object: any): boolean;
/**
* Returns whether the given value is a promise (i.e. it's an object with a then function).
*/
export function isPromiseAlike(object: any): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
export function isPending(object: any): boolean;
/**
* If an object is not a promise, it is as "near" as possible.
* If a promise is rejected, it is as "near" as possible too.
* If it's a fulfilled promise, the fulfillment value is nearer.
* If it's a deferred promise and the deferred has been resolved, the
* resolution is "nearer".
*/
export function nearer<T>(promise: Promise<T>): T;
/**
* This is an experimental tool for converting a generator function into a deferred function. This has the potential of reducing nested callbacks in engines that support yield.
*/
export function async<T>(generatorFunction: any): (...args: any[]) => Promise<T>;
export function nextTick(callback: Function): void;
/**
* A settable property that will intercept any uncaught errors that would otherwise be thrown in the next tick of the event loop, usually as a result of done. Can be useful for getting the full stack trace of an error in browsers, which is not usually possible with window.onerror.
*/
export var onerror: (reason: any) => void;
/**
* A settable property that lets you turn on long stack trace support. If turned on, "stack jumps" will be tracked across asynchronous promise operations, so that if an uncaught error is thrown by done or a rejection reason's stack property is inspected in a rejection callback, a long stack trace is produced.
*/
export var longStackSupport: boolean;
/**
* Calling resolve with a pending promise causes promise to wait on the passed promise, becoming fulfilled with its fulfillment value or rejected with its rejection reason (or staying pending forever, if the passed promise does).
* Calling resolve with a rejected promise causes promise to be rejected with the passed promise's rejection reason.
* Calling resolve with a fulfilled promise causes promise to be fulfilled with the passed promise's fulfillment value.
* Calling resolve with a non-promise value causes promise to be fulfilled with that value.
*/
export function resolve<T>(object: IWhenable<T>): Promise<T>;
/**
* Resets the global "Q" variable to the value it had before Q was loaded.
* This will either be undefined if there was no previous version, or the version of Q that was already loaded.
* @returns The last version of Q.
*/
export function noConflict(): typeof Q;
}
declare module "q" {
export = Q;
}
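For orientation, a minimal usage sketch of the API declared above, combining defer(), all(), spread(), and timeout(); the file names and the 5000 ms budget are illustrative only.

import Q = require('q');
import fs = require('fs');

// defer() yields an object with promise/resolve/reject, as documented above.
function readConfig(path: string): Q.Promise<string> {
    const d = Q.defer<string>();
    fs.readFile(path, 'utf8', (err, data) => err ? d.reject(err) : d.resolve(data));
    return d.promise;
}

// spread() fans an array of promises out to a variadic fulfillment handler.
Q.spread([readConfig('a.json'), readConfig('b.json')],
    (a: string, b: string) => console.log('loaded', a.length + b.length, 'chars'),
    (reason: any) => console.error('read failed:', reason));

// timeout() rejects the combined promise if it has not settled within 5000 ms.
Q.timeout(Q.all([readConfig('a.json'), readConfig('b.json')]), 5000, 'config read timed out');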

121
build/lib/typings/chalk.d.ts vendored Normal file
View File

@@ -0,0 +1,121 @@
// Type definitions for chalk v0.4.0
// Project: https://github.com/sindresorhus/chalk
// Definitions by: Diullei Gomes <https://github.com/Diullei>, Bart van der Schoor <https://github.com/Bartvds>, Nico Jansen <https://github.com/nicojs>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare namespace Chalk {
export var enabled: boolean;
export var supportsColor: boolean;
export var styles: ChalkStyleMap;
export function stripColor(value: string): any;
export function hasColor(str: string): boolean;
export interface ChalkChain extends ChalkStyle {
(...text: string[]): string;
}
export interface ChalkStyleElement {
open: string;
close: string;
}
// General
export var reset: ChalkChain;
export var bold: ChalkChain;
export var italic: ChalkChain;
export var underline: ChalkChain;
export var inverse: ChalkChain;
export var strikethrough: ChalkChain;
// Text colors
export var black: ChalkChain;
export var red: ChalkChain;
export var green: ChalkChain;
export var yellow: ChalkChain;
export var blue: ChalkChain;
export var magenta: ChalkChain;
export var cyan: ChalkChain;
export var white: ChalkChain;
export var gray: ChalkChain;
export var grey: ChalkChain;
// Background colors
export var bgBlack: ChalkChain;
export var bgRed: ChalkChain;
export var bgGreen: ChalkChain;
export var bgYellow: ChalkChain;
export var bgBlue: ChalkChain;
export var bgMagenta: ChalkChain;
export var bgCyan: ChalkChain;
export var bgWhite: ChalkChain;
export interface ChalkStyle {
// General
reset: ChalkChain;
bold: ChalkChain;
italic: ChalkChain;
underline: ChalkChain;
inverse: ChalkChain;
strikethrough: ChalkChain;
// Text colors
black: ChalkChain;
red: ChalkChain;
green: ChalkChain;
yellow: ChalkChain;
blue: ChalkChain;
magenta: ChalkChain;
cyan: ChalkChain;
white: ChalkChain;
gray: ChalkChain;
grey: ChalkChain;
// Background colors
bgBlack: ChalkChain;
bgRed: ChalkChain;
bgGreen: ChalkChain;
bgYellow: ChalkChain;
bgBlue: ChalkChain;
bgMagenta: ChalkChain;
bgCyan: ChalkChain;
bgWhite: ChalkChain;
}
export interface ChalkStyleMap {
// General
reset: ChalkStyleElement;
bold: ChalkStyleElement;
italic: ChalkStyleElement;
underline: ChalkStyleElement;
inverse: ChalkStyleElement;
strikethrough: ChalkStyleElement;
// Text colors
black: ChalkStyleElement;
red: ChalkStyleElement;
green: ChalkStyleElement;
yellow: ChalkStyleElement;
blue: ChalkStyleElement;
magenta: ChalkStyleElement;
cyan: ChalkStyleElement;
white: ChalkStyleElement;
gray: ChalkStyleElement;
// Background colors
bgBlack: ChalkStyleElement;
bgRed: ChalkStyleElement;
bgGreen: ChalkStyleElement;
bgYellow: ChalkStyleElement;
bgBlue: ChalkStyleElement;
bgMagenta: ChalkStyleElement;
bgCyan: ChalkStyleElement;
bgWhite: ChalkStyleElement;
}
}
declare module "chalk" {
export = Chalk;
}

18
build/lib/typings/debounce.d.ts vendored Normal file
View File

@@ -0,0 +1,18 @@
// Type definitions for compose-function
// Project: https://github.com/component/debounce
// Definitions by: Denis Sokolov <https://github.com/denis-sokolov>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "debounce" {
// Overload on boolean constants would allow us to narrow further,
// but it is not implemented for TypeScript yet
function f<A extends Function>(f: A, interval?: number, immediate?: boolean): A
/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace f {}
export = f;
}

21
build/lib/typings/event-stream.d.ts vendored Normal file
View File

@@ -0,0 +1,21 @@
declare module "event-stream" {
import { Stream } from 'stream';
import { ThroughStream } from 'through';
import { MapStream } from 'map-stream';
function merge(streams: Stream[]): ThroughStream;
function merge(...streams: Stream[]): ThroughStream;
function concat(...stream: Stream[]): ThroughStream;
function duplex(istream: Stream, ostream: Stream): ThroughStream;
function through(write?: (data: any) => void, end?: () => void,
opts?: {autoDestroy: boolean; }): ThroughStream;
function readArray<T>(array: T[]): ThroughStream;
function writeArray<T>(cb: (err:Error, array:T[]) => void): ThroughStream;
function mapSync<I,O>(cb: (data:I) => O): ThroughStream;
function map<I,O>(cb: (data:I, cb:(err?:Error, data?: O)=>void) => O): ThroughStream;
function readable(asyncFunction: Function): MapStream;
}
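A small sketch of how the helpers above compose (util.js below leans on through, mapSync, duplex, and readArray); the upper-casing transform is purely illustrative.

import es = require('event-stream');

// Build a duplex stream: data written to `input` is transformed synchronously
// and re-emitted from `output`.
const input = es.through();
const output = input.pipe(es.mapSync((chunk: string) => chunk.toUpperCase()));
const upperCaser = es.duplex(input, output);

// readArray/writeArray convert between arrays and streams.
es.readArray(['a', 'b', 'c'])
    .pipe(upperCaser)
    .pipe(es.writeArray((err: Error, result: string[]) => console.log(result))); // ['A', 'B', 'C']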

12
build/lib/typings/gulp-bom.d.ts vendored Normal file
View File

@@ -0,0 +1,12 @@
declare module "gulp-bom" {
function f(): NodeJS.ReadWriteStream;
/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace f {}
export = f;
}

43
build/lib/typings/gulp-concat.d.ts vendored Normal file
View File

@@ -0,0 +1,43 @@
// Type definitions for gulp-concat
// Project: http://github.com/wearefractal/gulp-concat
// Definitions by: Keita Kagurazaka <https://github.com/k-kagurazaka>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "gulp-concat" {
interface IOptions {
newLine: string;
}
interface IFsStats {
dev?: number;
ino?: number;
mode?: number;
nlink?: number;
uid?: number;
gid?: number;
rdev?: number;
size?: number;
blksize?: number;
blocks?: number;
atime?: Date;
mtime?: Date;
ctime?: Date;
}
interface IVinylOptions {
cwd?: string;
base?: string;
path?: string;
stat?: IFsStats;
contents?: NodeJS.ReadableStream | Buffer;
}
interface IConcat {
(filename: string, options?: IOptions): NodeJS.ReadWriteStream;
(options: IVinylOptions): NodeJS.ReadWriteStream;
}
var _tmp: IConcat;
export = _tmp;
}

12
build/lib/typings/gulp-cssnano.d.ts vendored Normal file
View File

@@ -0,0 +1,12 @@
declare module "gulp-cssnano" {
function f(opts:{reduceIdents:boolean;}): NodeJS.ReadWriteStream;
/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace f {}
export = f;
}

29
build/lib/typings/gulp-filter.d.ts vendored Normal file
View File

@@ -0,0 +1,29 @@
// Type definitions for gulp-filter v3.0.1
// Project: https://github.com/sindresorhus/gulp-filter
// Definitions by: Tanguy Krotoff <https://github.com/tkrotoff>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module 'gulp-filter' {
import File = require('vinyl');
import * as Minimatch from 'minimatch';
namespace filter {
interface FileFunction {
(file: File): boolean;
}
interface Options extends Minimatch.IOptions {
restore?: boolean;
passthrough?: boolean;
}
// A transform stream with a .restore object
interface Filter extends NodeJS.ReadWriteStream {
restore: NodeJS.ReadWriteStream
}
}
function filter(pattern: string | string[] | filter.FileFunction, options?: filter.Options): filter.Filter;
export = filter;
}
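A sketch of the restore pattern these declarations describe: narrow a gulp stream to a subset of files, operate on just those, then merge the rest back; the globs, task name and destination are illustrative.

import gulp = require('gulp');
import filter = require('gulp-filter');

const jsFilter = filter('**/*.js', { restore: true });

gulp.task('process-js-only', () => {
    return gulp.src('src/**/*')
        .pipe(jsFilter)          // only .js files continue down the pipe
        // ... js-only plugins would run here ...
        .pipe(jsFilter.restore)  // re-inject the files that were filtered out
        .pipe(gulp.dest('out'));
});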

12
build/lib/typings/gulp-flatmap.d.ts vendored Normal file
View File

@@ -0,0 +1,12 @@
declare module 'gulp-flatmap' {
import File = require('vinyl');
function f(fn:(stream:NodeJS.ReadWriteStream, file:File)=>NodeJS.ReadWriteStream): NodeJS.ReadWriteStream;
/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace f {}
export = f;
}

23
build/lib/typings/gulp-remote-src.d.ts vendored Normal file
View File

@@ -0,0 +1,23 @@
declare module 'gulp-remote-src' {
import stream = require("stream");
function remote(url: string, options: remote.IOptions): stream.Stream;
module remote {
export interface IRequestOptions {
body?: any;
json?: boolean;
method?: string;
headers?: any;
}
export interface IOptions {
base?: string;
buffer?: boolean;
requestOptions?: IRequestOptions;
}
}
export = remote;
}

29
build/lib/typings/gulp-rename.d.ts vendored Normal file
View File

@@ -0,0 +1,29 @@
// Type definitions for gulp-rename
// Project: https://github.com/hparra/gulp-rename
// Definitions by: Asana <https://asana.com>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "gulp-rename" {
interface ParsedPath {
dirname?: string;
basename?: string;
extname?: string;
}
interface Options extends ParsedPath {
prefix?: string;
suffix?: string;
}
function rename(name: string): NodeJS.ReadWriteStream;
function rename(callback: (path: ParsedPath) => any): NodeJS.ReadWriteStream;
function rename(opts: Options): NodeJS.ReadWriteStream;
/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace rename {}
export = rename;
}
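The three overloads above take a fixed name, a callback over the parsed path, or an options object; a short sketch with illustrative paths:

import gulp = require('gulp');
import rename = require('gulp-rename');

// Callback form: mutate the parsed path in place.
gulp.src('out/vs/loader.js')
    .pipe(rename(p => { p.basename = 'loader.min'; }))
    .pipe(gulp.dest('out-min'));

// Options form: append a suffix to every file name.
gulp.src('out/**/*.css')
    .pipe(rename({ suffix: '-inline' }))
    .pipe(gulp.dest('out-min'));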

27
build/lib/typings/gulp-sourcemaps.d.ts vendored Normal file
View File

@@ -0,0 +1,27 @@
// Type definitions for gulp-sourcemaps
// Project: https://github.com/floridoo/gulp-sourcemaps
// Definitions by: Asana <https://asana.com>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "gulp-sourcemaps" {
interface InitOptions {
loadMaps?: boolean;
debug?: boolean;
}
interface WriteMapper {
(file: string): string;
}
interface WriteOptions {
addComment?: boolean;
includeContent?: boolean;
sourceRoot?: string | WriteMapper;
sourceMappingURLPrefix?: string | WriteMapper;
sourceMappingURL?: (f:{relative:string})=>string;
}
export function init(opts?: InitOptions): NodeJS.ReadWriteStream;
export function write(path?: string, opts?: WriteOptions): NodeJS.ReadWriteStream;
export function write(opts?: WriteOptions): NodeJS.ReadWriteStream;
}
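A minimal pipeline sketch for these declarations: init() starts collecting maps (optionally loading pre-existing ones) and write() emits them; the task name, paths and sourceRoot are illustrative.

import gulp = require('gulp');
import sourcemaps = require('gulp-sourcemaps');

gulp.task('copy-with-maps', () => {
    return gulp.src('out/**/*.js')
        .pipe(sourcemaps.init({ loadMaps: true }))  // pick up existing sourceMappingURL comments
        // ... sourcemap-aware transforms would run here ...
        .pipe(sourcemaps.write('./', { includeContent: false, sourceRoot: '../src' }))
        .pipe(gulp.dest('out-min'));
});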

16
build/lib/typings/gulp-tsb.d.ts vendored Normal file
View File

@@ -0,0 +1,16 @@
declare module "gulp-tsb" {
export interface ICancellationToken {
isCancellationRequested(): boolean;
}
export interface IncrementalCompiler {
(token?:ICancellationToken): NodeJS.ReadWriteStream;
// program?: ts.Program;
}
export function create(configOrName: { [option: string]: string | number | boolean; } | string, verbose?: boolean, json?: boolean, onError?: (message: any) => void): IncrementalCompiler;
}
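gulp-tsb exposes the TypeScript compiler as an incremental gulp transform; a sketch of create() and the resulting IncrementalCompiler, with an illustrative tsconfig path and task name.

import gulp = require('gulp');
import tsb = require('gulp-tsb');

// Create the compilation once and reuse it, so repeated runs stay incremental.
const compilation = tsb.create('src/tsconfig.json', /*verbose*/ true, /*json*/ false,
    err => console.error(err));

gulp.task('compile', () => {
    return gulp.src('src/**/*.ts')
        .pipe(compilation())      // a cancellation token may optionally be passed here
        .pipe(gulp.dest('out'));
});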

133
build/lib/typings/gulp-util.d.ts vendored Normal file
View File

@@ -0,0 +1,133 @@
// Type definitions for gulp-util v3.0.x
// Project: https://github.com/gulpjs/gulp-util
// Definitions by: jedmao <https://github.com/jedmao>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module 'gulp-util' {
import vinyl = require('vinyl');
import chalk = require('chalk');
import through2 = require('through2');
export class File extends vinyl { }
/**
* Replaces a file extension in a path. Returns the new path.
*/
export function replaceExtension(npath: string, ext: string): string;
export var colors: typeof chalk;
export var date: {
(now?: Date, mask?: string, convertLocalTimeToUTC?: boolean): any;
(date?: string, mask?: string, convertLocalTimeToUTC?: boolean): any;
masks: any;
};
/**
* Logs stuff. Already prefixed with [gulp] and all that. Use the right colors
* for values. If you pass in multiple arguments it will join them by a space.
*/
export function log(message?: any, ...optionalParams: any[]): void;
/**
* This is a lodash.template function wrapper. You must pass in a valid gulp
* file object so it is available to the user or it will error. You can not
* configure any of the delimiters. Look at the lodash docs for more info.
*/
export function template(tmpl: string): (opt: { file: { path: string } }) => string;
export function template(tmpl: string, opt: { file: { path: string } }): string;
export var env: any;
export function beep(): void;
/**
* Returns a stream that does nothing but pass data straight through.
*/
export var noop: typeof through2;
export function isStream(obj: any): boolean;
export function isBuffer(obj: any): boolean;
export function isNull(obj: any): boolean;
export var linefeed: string;
export function combine(streams: NodeJS.ReadWriteStream[]): () => NodeJS.ReadWriteStream;
export function combine(...streams: NodeJS.ReadWriteStream[]): () => NodeJS.ReadWriteStream;
/**
* This is similar to es.wait but instead of buffering text into one string
* it buffers anything into an array (so very useful for file objects).
*/
export function buffer(cb?: (err: Error, data: any[]) => void): NodeJS.ReadWriteStream;
export class PluginError implements Error, PluginErrorOptions {
constructor(options?: PluginErrorOptions);
constructor(pluginName: string, options?: PluginErrorOptions);
constructor(pluginName: string, message: string, options?: PluginErrorOptions);
constructor(pluginName: string, message: Error, options?: PluginErrorOptions);
/**
* The module name of your plugin.
*/
name: string;
/**
* Can be a string or an existing error.
*/
message: any;
fileName: string;
lineNumber: number;
/**
* You need to include the message along with this stack. If you pass an
* error in as the message the stack will be pulled from that, otherwise one
* will be created.
*/
stack: string;
/**
* By default the stack will not be shown. Set this to true if you think the
* stack is important for your error.
*/
showStack: boolean;
/**
* Error properties will be included in err.toString(). Can be omitted by
* setting this to false.
*/
showProperties: boolean;
plugin: string;
error: Error;
}
}
interface PluginErrorOptions {
/**
* The module name of your plugin.
*/
name?: string;
/**
* Can be a string or an existing error.
*/
message?: any;
fileName?: string;
lineNumber?: number;
/**
* You need to include the message along with this stack. If you pass an
* error in as the message the stack will be pulled from that, otherwise one
* will be created.
*/
stack?: string;
/**
* By default the stack will not be shown. Set this to true if you think the
* stack is important for your error.
*/
showStack?: boolean;
/**
* Error properties will be included in err.toString(). Can be omitted by
* setting this to false.
*/
showProperties?: boolean;
plugin?: string;
error?: Error;
}
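A sketch of the two pieces of gulp-util that build plugins touch most often, log() and PluginError; the plugin name and message are illustrative.

import gutil = require('gulp-util');
import es = require('event-stream');

// log() prefixes output with [gulp]; colors is the bundled chalk instance.
gutil.log('bundling', gutil.colors.cyan('vs/editor/editor.main'));

// Plugins conventionally report failure by throwing or emitting a PluginError.
function failOnEmpty(): NodeJS.ReadWriteStream {
    return es.mapSync((file: gutil.File) => {
        if (file.isNull()) {
            throw new gutil.PluginError('fail-on-empty', file.relative + ' has no contents');
        }
        return file;
    });
}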

293
build/lib/typings/gulp.d.ts vendored Normal file
View File

@@ -0,0 +1,293 @@
// Type definitions for Gulp v3.8.x
// Project: http://gulpjs.com
// Definitions by: Drew Noakes <https://drewnoakes.com>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "gulp" {
import Orchestrator = require("orchestrator");
import VinylFile = require("vinyl");
namespace gulp {
interface Gulp extends Orchestrator {
/**
* Define a task
* @param name The name of the task.
* @param deps An array of task names to be executed and completed before your task will run.
* @param fn The function that performs the task's operations. For asynchronous tasks, you need to provide a hint when the task is complete:
* <ul>
* <li>Take in a callback</li>
* <li>Return a stream or a promise</li>
* </ul>
*/
task: Orchestrator.AddMethod;
/**
* Emits files matching provided glob or an array of globs. Returns a stream of Vinyl files that can be piped to plugins.
* @param glob Glob or array of globs to read.
* @param opt Options to pass to node-glob through glob-stream.
*/
src: SrcMethod;
/**
* Can be piped to and it will write files. Re-emits all data passed to it so you can pipe to multiple folders.
* Folders that don't exist will be created.
*
* @param outFolder The path (output folder) to write files to. Or a function that returns it, the function will be provided a vinyl File instance.
* @param opt
*/
dest: DestMethod;
/**
* Watch files and do something when a file changes. This always returns an EventEmitter that emits change events.
*
* @param glob a single glob or array of globs that indicate which files to watch for changes.
* @param opt options, that are passed to the gaze library.
* @param fn a callback or array of callbacks to be called on each change, or names of task(s) to run when a file changes, added with task().
*/
watch: WatchMethod;
}
interface GulpPlugin {
(...args: any[]): NodeJS.ReadWriteStream;
}
interface WatchMethod {
/**
* Watch files and do something when a file changes. This always returns an EventEmitter that emits change events.
*
* @param glob a single glob or array of globs that indicate which files to watch for changes.
* @param fn a callback or array of callbacks to be called on each change, or names of task(s) to run when a file changes, added with task().
*/
(glob: string|string[], fn: (WatchCallback|string)): NodeJS.EventEmitter;
/**
* Watch files and do something when a file changes. This always returns an EventEmitter that emits change events.
*
* @param glob a single glob or array of globs that indicate which files to watch for changes.
* @param fn a callback or array of callbacks to be called on each change, or names of task(s) to run when a file changes, added with task().
*/
(glob: string|string[], fn: (WatchCallback|string)[]): NodeJS.EventEmitter;
/**
* Watch files and do something when a file changes. This always returns an EventEmitter that emits change events.
*
* @param glob a single glob or array of globs that indicate which files to watch for changes.
* @param opt options, that are passed to the gaze library.
* @param fn a callback or array of callbacks to be called on each change, or names of task(s) to run when a file changes, added with task().
*/
(glob: string|string[], opt: WatchOptions, fn: (WatchCallback|string)): NodeJS.EventEmitter;
/**
* Watch files and do something when a file changes. This always returns an EventEmitter that emits change events.
*
* @param glob a single glob or array of globs that indicate which files to watch for changes.
* @param opt options, that are passed to the gaze library.
* @param fn a callback or array of callbacks to be called on each change, or names of task(s) to run when a file changes, added with task().
*/
(glob: string|string[], opt: WatchOptions, fn: (WatchCallback|string)[]): NodeJS.EventEmitter;
}
interface DestMethod {
/**
* Can be piped to and it will write files. Re-emits all data passed to it so you can pipe to multiple folders.
* Folders that don't exist will be created.
*
* @param outFolder The path (output folder) to write files to. Or a function that returns it, the function will be provided a vinyl File instance.
* @param opt
*/
(outFolder: string|((file: VinylFile) => string), opt?: DestOptions): NodeJS.ReadWriteStream;
}
interface SrcMethod {
/**
* Emits files matching provided glob or an array of globs. Returns a stream of Vinyl files that can be piped to plugins.
* @param glob Glob or array of globs to read.
* @param opt Options to pass to node-glob through glob-stream.
*/
(glob: string|string[], opt?: SrcOptions): NodeJS.ReadWriteStream;
}
/**
* Options to pass to node-glob through glob-stream.
* Includes a few gulp-specific options (read, buffer, base) in addition to those used by node-glob:
* https://github.com/isaacs/node-glob#options
*/
interface SrcOptions {
/**
* Setting this to <code>false</code> will return <code>file.contents</code> as <code>null</code>
* and not read the file at all.
* Default: <code>true</code>.
*/
read?: boolean;
/**
* Setting this to false will return <code>file.contents</code> as a stream and not buffer files.
* This is useful when working with large files.
* Note: Plugins might not implement support for streams.
* Default: <code>true</code>.
*/
buffer?: boolean;
/**
* The base path of a glob.
*
* Default is everything before a glob starts.
*/
base?: string;
/**
* The current working directory in which to search.
* Defaults to process.cwd().
*/
cwd?: string;
/**
* The place where patterns starting with / will be mounted onto.
* Defaults to path.resolve(options.cwd, "/") (/ on Unix systems, and C:\ or some such on Windows.)
*/
root?: string;
/**
* Include .dot files in normal matches and globstar matches.
* Note that an explicit dot in a portion of the pattern will always match dot files.
*/
dot?: boolean;
/**
* Set to match only files, not directories. Set this flag to prevent copying empty directories.
*/
nodir?: boolean;
/**
* By default, a pattern starting with a forward-slash will be "mounted" onto the root setting, so that a valid
* filesystem path is returned. Set this flag to disable that behavior.
*/
nomount?: boolean;
/**
* Add a / character to directory matches. Note that this requires additional stat calls.
*/
mark?: boolean;
/**
* Don't sort the results.
*/
nosort?: boolean;
/**
* Set to true to stat all results. This reduces performance somewhat, and is completely unnecessary, unless
* readdir is presumed to be an untrustworthy indicator of file existence. It will cause ELOOP to be triggered one
* level sooner in the case of cyclical symbolic links.
*/
stat?: boolean;
/**
* When an unusual error is encountered when attempting to read a directory, a warning will be printed to stderr.
* Set the silent option to true to suppress these warnings.
*/
silent?: boolean;
/**
* When an unusual error is encountered when attempting to read a directory, the process will just continue on in
* search of other matches. Set the strict option to raise an error in these cases.
*/
strict?: boolean;
/**
* Pass in a previously generated glob cache object to save some fs calls.
*/
cache?: boolean;
/**
* A cache of results of filesystem information, to prevent unnecessary stat calls.
* While it should not normally be necessary to set this, you may pass the statCache from one glob() call to the
* options object of another, if you know that the filesystem will not change between calls.
*/
statCache?: boolean;
/**
* Perform a synchronous glob search.
*/
sync?: boolean;
/**
* In some cases, brace-expanded patterns can result in the same file showing up multiple times in the result set.
* By default, this implementation prevents duplicates in the result set. Set this flag to disable that behavior.
*/
nounique?: boolean;
/**
* Set to never return an empty set, instead returning a set containing the pattern itself.
* This is the default in glob(3).
*/
nonull?: boolean;
/**
* Perform a case-insensitive match. Note that case-insensitive filesystems will sometimes result in glob returning
* results that are case-insensitively matched anyway, since readdir and stat will not raise an error.
*/
nocase?: boolean;
/**
* Set to enable debug logging in minimatch and glob.
*/
debug?: boolean;
/**
* Set to enable debug logging in glob, but not minimatch.
*/
globDebug?: boolean;
}
interface DestOptions {
/**
* The working directory against which the output folder is resolved. Only has an effect if the provided output folder is relative.
* Default: process.cwd()
*/
cwd?: string;
/**
* Octal permission string specifying mode for any folders that need to be created for output folder.
* Default: 0777.
*/
mode?: string;
}
/**
* Options that are passed to <code>gaze</code>.
* https://github.com/shama/gaze
*/
interface WatchOptions {
/** Interval to pass to fs.watchFile. */
interval?: number;
/** Delay for events called in succession for the same file/event. */
debounceDelay?: number;
/** Force the watch mode. Either 'auto' (default), 'watch' (force native events), or 'poll' (force stat polling). */
mode?: string;
/** The current working directory to base file patterns from. Default is process.cwd(). */
cwd?: string;
}
interface WatchEvent {
/** The type of change that occurred, either added, changed or deleted. */
type: string;
/** The path to the file that triggered the event. */
path: string;
}
/**
* Callback to be called on each watched file change.
*/
interface WatchCallback {
(event: WatchEvent): void;
}
interface TaskCallback {
/**
* Defines a task.
* Tasks may be made asynchronous if they are passing a callback or return a promise or a stream.
* @param cb callback used to signal asynchronous completion. Caller includes <code>err</code> in case of error.
*/
(cb?: (err?: any) => void): any;
}
}
var gulp: gulp.Gulp;
export = gulp;
}
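A sketch tying the task/src/dest/watch surface above together; the task names and globs are illustrative.

import gulp = require('gulp');

// A task with a dependency: 'copy-assets' only starts once 'clean' has signalled completion.
gulp.task('clean', (cb: (err?: any) => void) => {
    // ... delete the output folder, then call back ...
    cb();
});

gulp.task('copy-assets', ['clean'], () => {
    return gulp.src('src/**/*.{svg,png}', { base: 'src' })
        .pipe(gulp.dest('out'));
});

// Re-run the copy task whenever a matching file changes.
gulp.task('watch-assets', () => {
    gulp.watch('src/**/*.{svg,png}', ['copy-assets']);
});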

11
build/lib/typings/is.d.ts vendored Normal file
View File

@@ -0,0 +1,11 @@
declare module 'is' {
function a(value: any, type: string): boolean;
function defined(value: any): boolean;
function undef(value: any): boolean;
function object(value: any): boolean;
function string(value: any): value is string;
function boolean(value: any): boolean;
function array(value: any): boolean;
function empty<T>(value: Object | Array<T>): boolean;
function equal<T extends Object | Array<any> | Function | Date>(value: T, other: T): boolean;
}

273
build/lib/typings/lazy.js.d.ts vendored Normal file
View File

@@ -0,0 +1,273 @@
// Type definitions for Lazy.js 0.3.2
// Project: https://github.com/dtao/lazy.js/
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare function Lazy(value: string): Lazy.StringLikeSequence;
declare function Lazy<T>(value: T[]): Lazy.ArrayLikeSequence<T>;
declare function Lazy(value: any[]): Lazy.ArrayLikeSequence<any>;
declare function Lazy<T>(value: Object): Lazy.ObjectLikeSequence<T>;
declare function Lazy(value: Object): Lazy.ObjectLikeSequence<any>;
declare module Lazy {
function strict(): StrictLazy;
function generate<T>(generatorFn: GeneratorCallback<T>, length?: number): GeneratedSequence<T>;
function range(to: number): GeneratedSequence<number>;
function range(from: number, to: number, step?: number): GeneratedSequence<number>;
function repeat<T>(value: T, count?: number): GeneratedSequence<T>;
function on<T>(eventType: string): Sequence<T>;
function readFile(path: string): StringLikeSequence;
function makeHttpRequest(path: string): StringLikeSequence;
interface StrictLazy {
(value: string): StringLikeSequence;
<T>(value: T[]): ArrayLikeSequence<T>;
(value: any[]): ArrayLikeSequence<any>;
<T>(value: Object): ObjectLikeSequence<T>;
(value: Object): ObjectLikeSequence<any>;
strict(): StrictLazy;
generate<T>(generatorFn: GeneratorCallback<T>, length?: number): GeneratedSequence<T>;
range(to: number): GeneratedSequence<number>;
range(from: number, to: number, step?: number): GeneratedSequence<number>;
repeat<T>(value: T, count?: number): GeneratedSequence<T>;
on<T>(eventType: string): Sequence<T>;
readFile(path: string): StringLikeSequence;
makeHttpRequest(path: string): StringLikeSequence;
}
interface ArrayLike<T> {
length: number;
[index: number]: T;
}
interface Callback {
(): void;
}
interface ErrorCallback {
(error: any): void;
}
interface ValueCallback<T> {
(value: T): void;
}
interface GetKeyCallback<T> {
(value: T): string;
}
interface TestCallback<T> {
(value: T): boolean;
}
interface MapCallback<T, U> {
(value: T): U;
}
interface MapStringCallback {
(value: string): string;
}
interface NumberCallback<T> {
(value: T): number;
}
interface MemoCallback<T, U> {
(memo: U, value: T): U;
}
interface GeneratorCallback<T> {
(index: number): T;
}
interface CompareCallback {
(x: any, y: any): number;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
interface Iterator<T> {
new (sequence: Sequence<T>): Iterator<T>;
current(): T;
moveNext(): boolean;
}
interface GeneratedSequence<T> extends Sequence<T> {
new(generatorFn: GeneratorCallback<T>, length: number): GeneratedSequence<T>;
length(): number;
}
interface AsyncSequence<T> extends SequenceBase<T> {
each(callback: ValueCallback<T>): AsyncHandle<T>;
}
interface AsyncHandle<T> {
cancel(): void;
onComplete(callback: Callback): void;
onError(callback: ErrorCallback): void;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
module Sequence {
function define(methodName: string[], overrides: Object): Function;
}
interface Sequence<T> extends SequenceBase<T> {
each(eachFn: ValueCallback<T>): Sequence<T>;
}
interface ArraySequence<T> extends SequenceBase<T[]> {
flatten(): Sequence<T>;
}
interface SequenceBase<T> extends SequenceBaser<T> {
first(): any;
first(count: number): Sequence<T>;
indexOf(value: any, startIndex?: number): Sequence<T>;
last(): any;
last(count: number): Sequence<T>;
lastIndexOf(value: any): Sequence<T>;
reverse(): Sequence<T>;
}
interface SequenceBaser<T> {
// TODO improve define() (needs ugly overload)
async(interval: number): AsyncSequence<T>;
chunk(size: number): Sequence<T>;
compact(): Sequence<T>;
concat(var_args: T[]): Sequence<T>;
concat(sequence: Sequence<T>): Sequence<T>;
consecutive(length: number): Sequence<T>;
contains(value: T): boolean;
countBy(keyFn: GetKeyCallback<T>): ObjectLikeSequence<T>;
countBy(propertyName: string): ObjectLikeSequence<T>;
dropWhile(predicateFn: TestCallback<T>): Sequence<T>;
every(predicateFn: TestCallback<T>): boolean;
filter(predicateFn: TestCallback<T>): Sequence<T>;
find(predicateFn: TestCallback<T>): Sequence<T>;
findWhere(properties: Object): Sequence<T>;
groupBy(keyFn: GetKeyCallback<T>): ObjectLikeSequence<T>;
initial(count?: number): Sequence<T>;
intersection(var_args: T[]): Sequence<T>;
invoke(methodName: string): Sequence<T>;
isEmpty(): boolean;
join(delimiter?: string): string;
map<U>(mapFn: MapCallback<T, U[]>): ArraySequence<U>;
map<U>(mapFn: MapCallback<T, U>): Sequence<U>;
max(valueFn?: NumberCallback<T>): T;
min(valueFn?: NumberCallback<T>): T;
none(valueFn?: TestCallback<T>): boolean;
pluck(propertyName: string): Sequence<T>;
reduce<U>(aggregatorFn: MemoCallback<T, U>, memo?: U): U;
reduceRight<U>(aggregatorFn: MemoCallback<T, U>, memo: U): U;
reject(predicateFn: TestCallback<T>): Sequence<T>;
rest(count?: number): Sequence<T>;
shuffle(): Sequence<T>;
some(predicateFn?: TestCallback<T>): boolean;
sort(sortFn?: CompareCallback, descending?: boolean): Sequence<T>;
sortBy(sortFn: string, descending?: boolean): Sequence<T>;
sortBy(sortFn: NumberCallback<T>, descending?: boolean): Sequence<T>;
sortedIndex(value: T): Sequence<T>;
size(): number;
sum(valueFn?: NumberCallback<T>): Sequence<T>;
takeWhile(predicateFn: TestCallback<T>): Sequence<T>;
union(var_args: T[]): Sequence<T>;
uniq(): Sequence<T>;
where(properties: Object): Sequence<T>;
without(...var_args: T[]): Sequence<T>;
without(var_args: T[]): Sequence<T>;
zip(var_args: T[]): ArraySequence<T>;
toArray(): T[];
toObject(): Object;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
module ArrayLikeSequence {
function define(methodName: string[], overrides: Object): Function;
}
interface ArrayLikeSequence<T> extends Sequence<T> {
// define()X;
concat(var_args: T[]): ArrayLikeSequence<T>;
concat(sequence: Sequence<T>): Sequence<T>;
first(count?: number): ArrayLikeSequence<T>;
get(index: number): T;
length(): number;
map<U>(mapFn: MapCallback<T, U[]>): ArraySequence<U>;
map<U>(mapFn: MapCallback<T, U>): ArrayLikeSequence<U>;
pop(): ArrayLikeSequence<T>;
rest(count?: number): ArrayLikeSequence<T>;
reverse(): ArrayLikeSequence<T>;
shift(): ArrayLikeSequence<T>;
slice(begin: number, end?: number): ArrayLikeSequence<T>;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
module ObjectLikeSequence {
function define(methodName: string[], overrides: Object): Function;
}
interface ObjectLikeSequence<T> extends Sequence<T> {
assign(other: Object): ObjectLikeSequence<T>;
// throws error
//async(): X;
defaults(defaults: Object): ObjectLikeSequence<T>;
functions(): Sequence<T>;
get(property: string): ObjectLikeSequence<T>;
invert(): ObjectLikeSequence<T>;
keys(): Sequence<string>;
omit(properties: string[]): ObjectLikeSequence<T>;
pairs(): Sequence<T>;
pick(properties: string[]): ObjectLikeSequence<T>;
toArray(): T[];
toObject(): Object;
values(): Sequence<T>;
}
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
module StringLikeSequence {
function define(methodName: string[], overrides: Object): Function;
}
interface StringLikeSequence extends SequenceBaser<string> {
charAt(index: number): string;
charCodeAt(index: number): number;
contains(value: string): boolean;
endsWith(suffix: string): boolean;
first(): string;
first(count: number): StringLikeSequence;
indexOf(substring: string, startIndex?: number): number;
last(): string;
last(count: number): StringLikeSequence;
lastIndexOf(substring: string, startIndex?: number): number;
mapString(mapFn: MapStringCallback): StringLikeSequence;
match(pattern: RegExp): StringLikeSequence;
reverse(): StringLikeSequence;
split(delimiter: string): StringLikeSequence;
split(delimiter: RegExp): StringLikeSequence;
startsWith(prefix: string): boolean;
substring(start: number, stop?: number): StringLikeSequence;
toLowerCase(): StringLikeSequence;
toUpperCase(): StringLikeSequence;
}
}
declare module 'lazy.js' {
export = Lazy;
}
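A sketch of the sequence API above; sequences are lazy, so nothing runs until a terminal call such as toArray() or each(). The file names are made up.

import Lazy = require('lazy.js');

// Chained operations are fused and only evaluated by toArray().
const jsNames = Lazy(['editor.main.js', 'editor.main.css', 'workerMain.js'])
    .filter(name => /\.js$/.test(name))
    .map(name => name.toUpperCase())
    .toArray(); // ['EDITOR.MAIN.JS', 'WORKERMAIN.JS']

// Generated sequences can be unbounded; take only what is needed with first().
const squares = Lazy.generate(i => i * i).first(5).toArray(); // [0, 1, 4, 9, 16]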

64
build/lib/typings/minimatch.d.ts vendored Normal file
View File

@@ -0,0 +1,64 @@
// Type definitions for Minimatch 2.0.8
// Project: https://github.com/isaacs/minimatch
// Definitions by: vvakame <https://github.com/vvakame/>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "minimatch" {
function M(target: string, pattern: string, options?: M.IOptions): boolean;
namespace M {
function match(list: string[], pattern: string, options?: IOptions): string[];
function filter(pattern: string, options?: IOptions): (element: string, indexed: number, array: string[]) => boolean;
function makeRe(pattern: string, options?: IOptions): RegExp;
var Minimatch: IMinimatchStatic;
interface IOptions {
debug?: boolean;
nobrace?: boolean;
noglobstar?: boolean;
dot?: boolean;
noext?: boolean;
nocase?: boolean;
nonull?: boolean;
matchBase?: boolean;
nocomment?: boolean;
nonegate?: boolean;
flipNegate?: boolean;
}
interface IMinimatchStatic {
new (pattern: string, options?: IOptions): IMinimatch;
prototype: IMinimatch;
}
interface IMinimatch {
pattern: string;
options: IOptions;
/** 2-dimensional array of regexp or string expressions. */
set: any[][]; // (RegExp | string)[][]
regexp: RegExp;
negate: boolean;
comment: boolean;
empty: boolean;
makeRe(): RegExp; // regexp or boolean
match(fname: string): boolean;
matchOne(files: string[], pattern: string[], partial: boolean): boolean;
/** Deprecated. For internal use. */
debug(): void;
/** Deprecated. For internal use. */
make(): void;
/** Deprecated. For internal use. */
parseNegate(): void;
/** Deprecated. For internal use. */
braceExpand(pattern: string, options: IOptions): void;
/** Deprecated. For internal use. */
parse(pattern: string, isSub?: boolean): void;
}
}
export = M;
}
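A sketch of the matching entry points declared above; the patterns echo the resource globs used elsewhere in the build but are only examples.

import minimatch = require('minimatch');

// Test a single path against a glob.
const isIcon = minimatch('out-build/vs/base/browser/ui/icon.svg',
    'out-build/vs/{base,editor}/**/*.{svg,png}');

// filter() returns a predicate usable with Array.prototype.filter.
const files = ['a/test/x.js', 'a/src/y.js'];
const nonTest = files.filter(minimatch.filter('!**/test/**', { dot: true }));

// makeRe() exposes the compiled regular expression for a pattern.
const jsRe = minimatch.makeRe('**/*.js', { nocase: true });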

4
build/lib/typings/object-assign.d.ts vendored Normal file
View File

@@ -0,0 +1,4 @@
declare module 'object-assign' {
function fn(target: any, ...sources: any[]): any;
export = fn;
}

125
build/lib/typings/orchestrator.d.ts vendored Normal file
View File

@@ -0,0 +1,125 @@
// Type definitions for Orchestrator
// Project: https://github.com/orchestrator/orchestrator
// Definitions by: Qubo <https://github.com/tkQubo>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare type Strings = string|string[];
declare module "orchestrator" {
class Orchestrator {
add: Orchestrator.AddMethod;
/**
* Have you defined a task with this name?
* @param name The task name to query
*/
hasTask(name: string): boolean;
start: Orchestrator.StartMethod;
stop(): void;
/**
* Listen to orchestrator internals
* @param event Event name to listen to:
* <ul>
* <li>start: from start() method, shows you the task sequence
* <li>stop: from stop() method, the queue finished successfully
* <li>err: from stop() method, the queue was aborted due to a task error
* <li>task_start: from _runTask() method, task was started
* <li>task_stop: from _runTask() method, task completed successfully
* <li>task_err: from _runTask() method, task errored
* <li>task_not_found: from start() method, you're trying to start a task that doesn't exist
* <li>task_recursion: from start() method, there are recursive dependencies in your task list
* </ul>
* @param cb Passes single argument: e: event details
*/
on(event: string, cb: (e: Orchestrator.OnCallbackEvent) => any): Orchestrator;
/**
* Listen to all orchestrator events from one callback
* @param cb Passes single argument: e: event details
*/
onAll(cb: (e: Orchestrator.OnAllCallbackEvent) => any): void;
}
namespace Orchestrator {
interface AddMethodCallback {
/**
* Accept a callback
* @param callback
*/
(callback?: Function): any;
/**
* Return a promise
*/
(): Q.Promise<any>;
/**
* Return a stream: (task is marked complete when stream ends)
*/
(): any; //TODO: stream type should be here e.g. map-stream
}
/**
* Define a task
*/
interface AddMethod {
/**
* Define a task
* @param name The name of the task.
* @param deps An array of task names to be executed and completed before your task will run.
* @param fn The function that performs the task's operations. For asynchronous tasks, you need to provide a hint when the task is complete:
* <ul>
* <li>Take in a callback</li>
* <li>Return a stream or a promise</li>
* </ul>
*/
(name: string, deps?: string[], fn?: AddMethodCallback|Function): Orchestrator;
/**
* Define a task
* @param name The name of the task.
* @param fn The function that performs the task's operations. For asynchronous tasks, you need to provide a hint when the task is complete:
* <ul>
* <li>Take in a callback</li>
* <li>Return a stream or a promise</li>
* </ul>
*/
(name: string, fn?: AddMethodCallback|Function): Orchestrator;
}
/**
* Start running the tasks
*/
interface StartMethod {
/**
* Start running the tasks
* @param tasks Tasks to be executed. You may pass any number of tasks as individual arguments.
* @param cb Callback to call after run completed.
*/
(tasks: Strings, cb?: (error?: any) => any): Orchestrator;
/**
* Start running the tasks
* @param tasks Tasks to be executed. You may pass any number of tasks as individual arguments.
* @param cb Callback to call after run completed.
*/
(...tasks: Strings[]/*, cb?: (error: any) => any */): Orchestrator;
//TODO: TypeScript 1.5.3 cannot express varargs followed by callback as a last argument...
(task1: Strings, task2: Strings, cb?: (error?: any) => any): Orchestrator;
(task1: Strings, task2: Strings, task3: Strings, cb?: (error?: any) => any): Orchestrator;
(task1: Strings, task2: Strings, task3: Strings, task4: Strings, cb?: (error?: any) => any): Orchestrator;
(task1: Strings, task2: Strings, task3: Strings, task4: Strings, task5: Strings, cb?: (error?: any) => any): Orchestrator;
(task1: Strings, task2: Strings, task3: Strings, task4: Strings, task5: Strings, task6: Strings, cb?: (error?: any) => any): Orchestrator;
}
interface OnCallbackEvent {
message: string;
task: string;
err: any;
duration?: number;
}
interface OnAllCallbackEvent extends OnCallbackEvent {
src: string;
}
}
export = Orchestrator;
}
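Gulp 3's task graph is an Orchestrator; a minimal sketch of add/start/on against the shapes declared above, with illustrative task names.

import Orchestrator = require('orchestrator');

const orchestrator = new Orchestrator();

// Tasks signal completion via a callback, a returned promise, or a returned stream.
orchestrator.add('clean', (cb: Function) => cb());
orchestrator.add('build', ['clean'], (cb: Function) => cb());

// Observe internal events such as task_start, task_stop and task_err.
orchestrator.on('task_stop', e => console.log(e.task + ' finished in ' + e.duration + 's'));

orchestrator.start('build', (err?: any) => {
    if (err) { console.error('build failed:', err); }
});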

23
build/lib/typings/pump.d.ts vendored Normal file
View File

@@ -0,0 +1,23 @@
declare module 'pump' {
function f(
str1:NodeJS.WritableStream,
str2:NodeJS.WritableStream,
str3:NodeJS.WritableStream,
str4:NodeJS.WritableStream,
str5:NodeJS.WritableStream,
str6:NodeJS.WritableStream,
str7:NodeJS.WritableStream,
str8:NodeJS.WritableStream,
str9:NodeJS.WritableStream,
str10:NodeJS.WritableStream,
cb:(err:any)=>void
): NodeJS.WritableStream;
/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace f {}
export = f;
}

17
build/lib/typings/rimraf.d.ts vendored Normal file
View File

@@ -0,0 +1,17 @@
// Type definitions for rimraf
// Project: https://github.com/isaacs/rimraf
// Definitions by: Carlos Ballesteros Velasco <https://github.com/soywiz>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
// Imported from: https://github.com/soywiz/typescript-node-definitions/rimraf.d.ts
declare module "rimraf" {
function rimraf(path: string, callback: (error: Error) => void): void;
function rimraf(path: string, opts:{}, callback: (error: Error) => void): void;
namespace rimraf {
export function sync(path: string): void;
export var EMFILE_MAX: number;
export var BUSYTRIES_MAX: number;
}
export = rimraf;
}

91
build/lib/typings/source-map.d.ts vendored Normal file
View File

@@ -0,0 +1,91 @@
// Type definitions for source-map v0.1.38
// Project: https://github.com/mozilla/source-map
// Definitions by: Morten Houston Ludvigsen <https://github.com/MortenHoustonLudvigsen>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare namespace SourceMap {
interface StartOfSourceMap {
file?: string;
sourceRoot?: string;
}
interface RawSourceMap extends StartOfSourceMap {
version: string|number;
sources: Array<string>;
names: Array<string>;
sourcesContent?: string[];
mappings: string;
}
interface Position {
line: number;
column: number;
}
interface MappedPosition extends Position {
source: string;
name?: string;
}
interface MappingItem {
source: string;
generatedLine: number;
generatedColumn: number;
originalLine: number;
originalColumn: number;
name: string;
}
interface Mapping {
generated: Position;
original: Position;
source: string;
name?: string;
}
interface CodeWithSourceMap {
code: string;
map: SourceMapGenerator;
}
class SourceMapConsumer {
public static GENERATED_ORDER: number;
public static ORIGINAL_ORDER: number;
constructor(rawSourceMap: RawSourceMap);
public originalPositionFor(generatedPosition: Position): MappedPosition;
public generatedPositionFor(originalPosition: MappedPosition): Position;
public sourceContentFor(source: string): string;
public eachMapping(callback: (mapping: MappingItem) => void, context?: any, order?: number): void;
}
class SourceMapGenerator {
constructor(startOfSourceMap?: StartOfSourceMap);
public static fromSourceMap(sourceMapConsumer: SourceMapConsumer): SourceMapGenerator;
public addMapping(mapping: Mapping): void;
public setSourceContent(sourceFile: string, sourceContent: string): void;
public applySourceMap(sourceMapConsumer: SourceMapConsumer, sourceFile?: string, sourceMapPath?: string): void;
public toString(): string;
public toJSON(): RawSourceMap;
}
class SourceNode {
constructor();
constructor(line: number, column: number, source: string);
constructor(line: number, column: number, source: string, chunk?: string, name?: string);
public static fromStringWithSourceMap(code: string, sourceMapConsumer: SourceMapConsumer, relativePath?: string): SourceNode;
public add(chunk: any): SourceNode;
public prepend(chunk: any): SourceNode;
public setSourceContent(sourceFile: string, sourceContent: string): void;
public walk(fn: (chunk: string, mapping: MappedPosition) => void): void;
public walkSourceContents(fn: (file: string, content: string) => void): void;
public join(sep: string): SourceNode;
public replaceRight(pattern: string, replacement: string): SourceNode;
public toString(): string;
public toStringWithSourceMap(startOfSourceMap?: StartOfSourceMap): CodeWithSourceMap;
}
}
declare module 'source-map' {
export = SourceMap;
}
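A sketch of generating and consuming a map with the classes above; the positions and file names are invented.

import sm = require('source-map');

// Record where each generated position originated.
const generator = new sm.SourceMapGenerator({ file: 'bundle.js', sourceRoot: '../src' });
generator.addMapping({
    generated: { line: 1, column: 0 },
    original: { line: 10, column: 4 },
    source: 'editor.ts'
});
const rawMap = generator.toJSON();

// Map a generated location back to its original position.
const consumer = new sm.SourceMapConsumer(rawMap);
console.log(consumer.originalPositionFor({ line: 1, column: 0 })); // { source, line, column, name }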

22
build/lib/typings/through.d.ts vendored Normal file
View File

@@ -0,0 +1,22 @@
// Type definitions for through
// Project: https://github.com/dominictarr/through
// Definitions by: Andrew Gaspar <https://github.com/AndrewGaspar/>
// Definitions: https://github.com/borisyankov/DefinitelyTyped
declare module "through" {
import stream = require("stream");
function through(write?: (data:any) => void,
end?: () => void,
opts?: {
autoDestroy: boolean;
}): through.ThroughStream;
module through {
export interface ThroughStream extends stream.Transform {
autoDestroy: boolean;
}
}
export = through;
}

38
build/lib/typings/through2.d.ts vendored Normal file
View File

@@ -0,0 +1,38 @@
// Type definitions for through2 v 2.0.0
// Project: https://github.com/rvagg/through2
// Definitions by: Bart van der Schoor <https://github.com/Bartvds>, jedmao <https://github.com/jedmao>, Georgios Valotasios <https://github.com/valotas>, Ben Chauvette <https://github.com/bdchauvette>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module 'through2' {
import stream = require('stream');
type TransformCallback = (err?: any, data?: any) => void;
type TransformFunction = (chunk: any, enc: string, callback: TransformCallback) => void;
type FlushCallback = (flushCallback: () => void) => void;
function through2(transform?: TransformFunction, flush?: FlushCallback): stream.Transform;
function through2(opts?: stream.DuplexOptions, transform?: TransformFunction, flush?: FlushCallback): stream.Transform;
namespace through2 {
export interface Through2Constructor extends stream.Transform {
new(opts?: stream.DuplexOptions): stream.Transform;
(opts?: stream.DuplexOptions): stream.Transform;
}
/**
* Convenience method for creating object streams.
*/
export function obj(transform?: TransformFunction, flush?: FlushCallback): stream.Transform;
/**
* Creates a constructor for a custom Transform. This is useful when you
* want to use the same transform logic in multiple instances.
*/
export function ctor(opts?: stream.DuplexOptions, transform?: TransformFunction, flush?: FlushCallback): Through2Constructor;
}
export = through2;
}
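A sketch of obj(), the object-mode helper most gulp plugins build on; the transform simply tags each object passing through, which is illustrative only.

import through2 = require('through2');

// Object mode: the transform receives whole objects (e.g. vinyl Files), not byte chunks.
const tagger = through2.obj(function (file: any, enc: string, callback) {
    file.tagged = true;          // annotate and forward the object
    callback(null, file);
});

// An optional flush function runs once after the last chunk, before 'end'.
const passthrough = through2.obj(
    function (file: any, enc: string, callback) { callback(null, file); },
    function (done) { done(); });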

6086
build/lib/typings/underscore.d.ts vendored Normal file

File diff suppressed because it is too large

112
build/lib/typings/vinyl.d.ts vendored Normal file
View File

@@ -0,0 +1,112 @@
// Type definitions for vinyl 0.4.3
// Project: https://github.com/wearefractal/vinyl
// Definitions by: vvakame <https://github.com/vvakame/>, jedmao <https://github.com/jedmao>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare module "vinyl" {
import fs = require("fs");
/**
* A virtual file format.
*/
class File {
constructor(options?: {
/**
* Default: process.cwd()
*/
cwd?: string;
/**
* Used for relative pathing. Typically where a glob starts.
*/
base?: string;
/**
* Full path to the file.
*/
path?: string;
/**
* Path history. Has no effect if options.path is passed.
*/
history?: string[];
/**
* The result of an fs.stat call. See fs.Stats for more information.
*/
stat?: fs.Stats;
/**
* File contents.
* Type: Buffer, Stream, or null
*/
contents?: Buffer | NodeJS.ReadWriteStream;
});
/**
* Default: process.cwd()
*/
public cwd: string;
/**
* Used for relative pathing. Typically where a glob starts.
*/
public base: string;
/**
* Full path to the file.
*/
public path: string;
public stat: fs.Stats;
/**
* Type: Buffer|Stream|null (Default: null)
*/
public contents: Buffer | NodeJS.ReadableStream;
/**
* Returns path.relative for the file base and file path.
* Example:
* var file = new File({
* cwd: "/",
* base: "/test/",
* path: "/test/file.js"
* });
* console.log(file.relative); // file.js
*/
public relative: string;
public isBuffer(): boolean;
public isStream(): boolean;
public isNull(): boolean;
public isDirectory(): boolean;
/**
* Returns a new File object with all attributes cloned. Custom attributes are deep-cloned.
*/
public clone(opts?: { contents?: boolean }): File;
/**
* If file.contents is a Buffer, it will write it to the stream.
* If file.contents is a Stream, it will pipe it to the stream.
* If file.contents is null, it will do nothing.
*/
public pipe<T extends NodeJS.ReadWriteStream>(
stream: T,
opts?: {
/**
* If false, the destination stream will not be ended (same as node core).
*/
end?: boolean;
}): T;
/**
* Returns a pretty String interpretation of the File. Useful for console.log.
*/
public inspect(): string;
}
/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace File {}
export = File;
}
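A sketch of constructing and inspecting a vinyl File, the in-memory file object every gulp stream carries; the paths and contents are illustrative.

import File = require('vinyl');

// A File is path metadata plus contents (Buffer, stream, or null).
const file = new File({
    cwd: '/',
    base: '/src/',
    path: '/src/vs/editor/editor.main.js',
    contents: new Buffer('console.log("hi");', 'utf8')
});

console.log(file.relative);   // 'vs/editor/editor.main.js' -- path relative to base
console.log(file.isBuffer()); // true
const copy = file.clone({ contents: true });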

213
build/lib/util.js Normal file
View File

@@ -0,0 +1,213 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var es = require("event-stream");
var debounce = require("debounce");
var _filter = require("gulp-filter");
var rename = require("gulp-rename");
var _ = require("underscore");
var path = require("path");
var fs = require("fs");
var _rimraf = require("rimraf");
var git = require("./git");
var VinylFile = require("vinyl");
var NoCancellationToken = { isCancellationRequested: function () { return false; } };
function incremental(streamProvider, initial, supportsCancellation) {
var input = es.through();
var output = es.through();
var state = 'idle';
var buffer = Object.create(null);
var token = !supportsCancellation ? null : { isCancellationRequested: function () { return Object.keys(buffer).length > 0; } };
var run = function (input, isCancellable) {
state = 'running';
var stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? token : NoCancellationToken);
input
.pipe(stream)
.pipe(es.through(null, function () {
state = 'idle';
eventuallyRun();
}))
.pipe(output);
};
if (initial) {
run(initial, false);
}
var eventuallyRun = debounce(function () {
var paths = Object.keys(buffer);
if (paths.length === 0) {
return;
}
var data = paths.map(function (path) { return buffer[path]; });
buffer = Object.create(null);
run(es.readArray(data), true);
}, 500);
input.on('data', function (f) {
buffer[f.path] = f;
if (state === 'idle') {
eventuallyRun();
}
});
return es.duplex(input, output);
}
exports.incremental = incremental;
function fixWin32DirectoryPermissions() {
if (!/win32/.test(process.platform)) {
return es.through();
}
return es.mapSync(function (f) {
if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) {
f.stat.mode = 16877;
}
return f;
});
}
exports.fixWin32DirectoryPermissions = fixWin32DirectoryPermissions;
function setExecutableBit(pattern) {
var setBit = es.mapSync(function (f) {
f.stat.mode = 33261;
return f;
});
if (!pattern) {
return setBit;
}
var input = es.through();
var filter = _filter(pattern, { restore: true });
var output = input
.pipe(filter)
.pipe(setBit)
.pipe(filter.restore);
return es.duplex(input, output);
}
exports.setExecutableBit = setExecutableBit;
function toFileUri(filePath) {
var match = filePath.match(/^([a-z])\:(.*)$/i);
if (match) {
filePath = '/' + match[1].toUpperCase() + ':' + match[2];
}
return 'file://' + filePath.replace(/\\/g, '/');
}
exports.toFileUri = toFileUri;
function skipDirectories() {
return es.mapSync(function (f) {
if (!f.isDirectory()) {
return f;
}
});
}
exports.skipDirectories = skipDirectories;
function cleanNodeModule(name, excludes, includes) {
var toGlob = function (path) { return '**/node_modules/' + name + (path ? '/' + path : ''); };
var negate = function (str) { return '!' + str; };
var allFilter = _filter(toGlob('**'), { restore: true });
var globs = [toGlob('**')].concat(excludes.map(_.compose(negate, toGlob)));
var input = es.through();
var nodeModuleInput = input.pipe(allFilter);
var output = nodeModuleInput.pipe(_filter(globs));
if (includes) {
var includeGlobs = includes.map(toGlob);
output = es.merge(output, nodeModuleInput.pipe(_filter(includeGlobs)));
}
output = output.pipe(allFilter.restore);
return es.duplex(input, output);
}
exports.cleanNodeModule = cleanNodeModule;
function loadSourcemaps() {
var input = es.through();
var output = input
.pipe(es.map(function (f, cb) {
if (f.sourceMap) {
cb(null, f);
return;
}
if (!f.contents) {
cb(new Error('empty file'));
return;
}
var contents = f.contents.toString('utf8');
var reg = /\/\/# sourceMappingURL=(.*)$/g;
var lastMatch = null, match = null;
while (match = reg.exec(contents)) {
lastMatch = match;
}
if (!lastMatch) {
f.sourceMap = {
version: 3,
names: [],
mappings: '',
sources: [f.relative.replace(/\//g, '/')],
sourcesContent: [contents]
};
cb(null, f);
return;
}
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
if (err) {
return cb(err);
}
f.sourceMap = JSON.parse(contents);
cb(null, f);
});
}));
return es.duplex(input, output);
}
exports.loadSourcemaps = loadSourcemaps;
function stripSourceMappingURL() {
var input = es.through();
var output = input
.pipe(es.mapSync(function (f) {
var contents = f.contents.toString('utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
return es.duplex(input, output);
}
exports.stripSourceMappingURL = stripSourceMappingURL;
function rimraf(dir) {
var retries = 0;
var retry = function (cb) {
_rimraf(dir, { maxBusyTries: 1 }, function (err) {
if (!err) {
return cb();
}
;
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(function () { return retry(cb); }, 10);
}
return cb(err);
});
};
return function (cb) { return retry(cb); };
}
exports.rimraf = rimraf;
function getVersion(root) {
var version = process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
version = git.getVersion(root);
}
return version;
}
exports.getVersion = getVersion;
function rebase(count) {
return rename(function (f) {
var parts = f.dirname.split(/[\/\\]/);
f.dirname = parts.slice(count).join(path.sep);
});
}
exports.rebase = rebase;
function filter(fn) {
var result = es.through(function (data) {
if (fn(data)) {
this.emit('data', data);
}
else {
result.restore.push(data);
}
});
result.restore = es.through();
return result;
}
exports.filter = filter;

271
build/lib/util.ts Normal file
View File

@@ -0,0 +1,271 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as es from 'event-stream';
import * as debounce from 'debounce';
import * as _filter from 'gulp-filter';
import * as rename from 'gulp-rename';
import * as _ from 'underscore';
import * as path from 'path';
import * as fs from 'fs';
import * as _rimraf from 'rimraf';
import * as git from './git';
import * as VinylFile from 'vinyl';
import { ThroughStream } from 'through';
import * as sm from 'source-map';
export interface ICancellationToken {
isCancellationRequested(): boolean;
}
const NoCancellationToken: ICancellationToken = { isCancellationRequested: () => false };
export interface IStreamProvider {
(cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream;
}
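/**
 * Wraps a stream provider so that files arriving while a run is in progress are
 * buffered and replayed as a single debounced batch once the current run ends.
 * When cancellation is supported, the token reports "requested" as soon as new
 * input has been buffered, so the provider can abandon the in-flight run early.
 */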
export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation: boolean): NodeJS.ReadWriteStream {
const input = es.through();
const output = es.through();
let state = 'idle';
let buffer = Object.create(null);
const token: ICancellationToken = !supportsCancellation ? null : { isCancellationRequested: () => Object.keys(buffer).length > 0 };
const run = (input, isCancellable) => {
state = 'running';
const stream = !supportsCancellation ? streamProvider() : streamProvider(isCancellable ? token : NoCancellationToken);
input
.pipe(stream)
.pipe(es.through(null, () => {
state = 'idle';
eventuallyRun();
}))
.pipe(output);
};
if (initial) {
run(initial, false);
}
const eventuallyRun = debounce(() => {
const paths = Object.keys(buffer);
if (paths.length === 0) {
return;
}
const data = paths.map(path => buffer[path]);
buffer = Object.create(null);
run(es.readArray(data), true);
}, 500);
input.on('data', (f: any) => {
buffer[f.path] = f;
if (state === 'idle') {
eventuallyRun();
}
});
return es.duplex(input, output);
}
export function fixWin32DirectoryPermissions(): NodeJS.ReadWriteStream {
if (!/win32/.test(process.platform)) {
return es.through();
}
return es.mapSync<VinylFile, VinylFile>(f => {
if (f.stat && f.stat.isDirectory && f.stat.isDirectory()) {
f.stat.mode = 16877;
}
return f;
});
}
export function setExecutableBit(pattern: string | string[]): NodeJS.ReadWriteStream {
var setBit = es.mapSync<VinylFile, VinylFile>(f => {
f.stat.mode = /* 100755 */ 33261;
return f;
});
if (!pattern) {
return setBit;
}
var input = es.through();
var filter = _filter(pattern, { restore: true });
var output = input
.pipe(filter)
.pipe(setBit)
.pipe(filter.restore);
return es.duplex(input, output);
}
export function toFileUri(filePath: string): string {
const match = filePath.match(/^([a-z])\:(.*)$/i);
if (match) {
filePath = '/' + match[1].toUpperCase() + ':' + match[2];
}
return 'file://' + filePath.replace(/\\/g, '/');
}
export function skipDirectories(): NodeJS.ReadWriteStream {
return es.mapSync<VinylFile, VinylFile>(f => {
if (!f.isDirectory()) {
return f;
}
});
}
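/**
 * Builds a stream that drops the excluded globs underneath node_modules/<name>
 * while letting every other file pass through untouched; the optional includes
 * are merged back in even when they match an exclude pattern.
 */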
export function cleanNodeModule(name: string, excludes: string[], includes: string[]): NodeJS.ReadWriteStream {
const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : '');
const negate = (str: string) => '!' + str;
const allFilter = _filter(toGlob('**'), { restore: true });
const globs = [toGlob('**')].concat(excludes.map(_.compose(negate, toGlob)));
const input = es.through();
const nodeModuleInput = input.pipe(allFilter);
let output: NodeJS.ReadWriteStream = nodeModuleInput.pipe(_filter(globs));
if (includes) {
const includeGlobs = includes.map(toGlob);
output = es.merge(output, nodeModuleInput.pipe(_filter(includeGlobs)));
}
output = output.pipe(allFilter.restore);
return es.duplex(input, output);
}
declare class FileSourceMap extends VinylFile {
public sourceMap: sm.RawSourceMap;
}
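/**
 * Attaches a source map to every file in the stream: either the map referenced
 * by the last sourceMappingURL comment (which is stripped from the contents),
 * or a synthesized empty map pointing back at the file itself when no such
 * comment is present.
 */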
export function loadSourcemaps(): NodeJS.ReadWriteStream {
const input = es.through();
const output = input
.pipe(es.map<FileSourceMap, FileSourceMap>((f, cb): FileSourceMap => {
if (f.sourceMap) {
cb(null, f);
return;
}
if (!f.contents) {
cb(new Error('empty file'));
return;
}
const contents = (<Buffer>f.contents).toString('utf8');
const reg = /\/\/# sourceMappingURL=(.*)$/g;
let lastMatch = null, match = null;
while (match = reg.exec(contents)) {
lastMatch = match;
}
if (!lastMatch) {
f.sourceMap = {
version: 3,
names: [],
mappings: '',
sources: [f.relative.replace(/\//g, '/')],
sourcesContent: [contents]
};
cb(null, f);
return;
}
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
if (err) { return cb(err); }
f.sourceMap = JSON.parse(contents);
cb(null, f);
});
}));
return es.duplex(input, output);
}
export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
const input = es.through();
const output = input
.pipe(es.mapSync<VinylFile, VinylFile>(f => {
const contents = (<Buffer>f.contents).toString('utf8');
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
return f;
}));
return es.duplex(input, output);
}
export function rimraf(dir: string): (cb: any) => void {
let retries = 0;
const retry = cb => {
_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
if (!err) {
return cb();
};
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
return setTimeout(() => retry(cb), 10);
}
return cb(err);
});
};
return cb => retry(cb);
}
export function getVersion(root: string): string {
let version = process.env['BUILD_SOURCEVERSION'];
if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
version = git.getVersion(root);
}
return version;
}
export function rebase(count: number): NodeJS.ReadWriteStream {
return rename(f => {
const parts = f.dirname.split(/[\/\\]/);
f.dirname = parts.slice(count).join(path.sep);
});
}
export interface FilterStream extends NodeJS.ReadWriteStream {
restore: ThroughStream;
}
export function filter(fn: (data: any) => boolean): FilterStream {
const result = <FilterStream><any>es.through(function (data) {
if (fn(data)) {
this.emit('data', data);
} else {
result.restore.push(data);
}
});
result.restore = es.through();
return result;
}

38
build/lib/watch/index.js Normal file
View File

@@ -0,0 +1,38 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const es = require('event-stream');
/** Ugly hack for gulp-tsb */
function handleDeletions() {
return es.mapSync(f => {
if (/\.ts$/.test(f.relative) && !f.contents) {
f.contents = new Buffer('');
f.stat = { mtime: new Date() };
}
return f;
});
}
let watch = void 0;
if (!process.env['VSCODE_USE_LEGACY_WATCH']) {
try {
watch = require('./watch-nsfw');
} catch (err) {
console.warn('Could not load our cross platform file watcher: ' + err.toString());
console.warn('Falling back to our platform specific watcher...');
}
}
if (!watch) {
watch = process.platform === 'win32' ? require('./watch-win32') : require('gulp-watch');
}
module.exports = function () {
return watch.apply(null, arguments)
.pipe(handleDeletions());
};

11
build/lib/watch/package.json Normal file
View File

@@ -0,0 +1,11 @@
{
"name": "watch",
"version": "1.0.0",
"description": "",
"author": "Microsoft ",
"private": true,
"devDependencies": {
"gulp-watch": "^4.3.9",
"nsfw": "^1.0.15"
}
}

94
build/lib/watch/watch-nsfw.js Normal file
View File

@@ -0,0 +1,94 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var nsfw = require('nsfw');
var path = require('path');
var fs = require('fs');
var File = require('vinyl');
var es = require('event-stream');
var filter = require('gulp-filter');
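// nsfw reports numeric actions (0 = created, 1 = deleted, 2 = modified, 3 = renamed);
// renames are split into an 'unlink' + 'add' pair in the event handler below.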
function toChangeType(type) {
switch (type) {
case 0: return 'add';
case 1: return 'unlink';
case 2: return 'change';
}
}
function watch(root) {
var result = es.through();
function handleEvent(path, type) {
if (/[/\\].git[/\\]/.test(path) || /[/\\]out[/\\]/.test(path)) {
return; // filter as early as possible
}
var file = new File({
path: path,
base: root
});
file.event = type;
result.emit('data', file);
}
nsfw(root, function(events) {
for (var i = 0; i < events.length; i++) {
var e = events[i];
var changeType = e.action;
if (changeType === 3 /* RENAMED */) {
handleEvent(path.join(e.directory, e.oldFile), 'unlink');
handleEvent(path.join(e.directory, e.newFile), 'add');
} else {
handleEvent(path.join(e.directory, e.file), toChangeType(changeType));
}
}
}).then(function(watcher) {
watcher.start();
});
return result;
}
var cache = Object.create(null);
module.exports = function(pattern, options) {
options = options || {};
var cwd = path.normalize(options.cwd || process.cwd());
var watcher = cache[cwd];
if (!watcher) {
watcher = cache[cwd] = watch(cwd);
}
var rebase = !options.base ? es.through() : es.mapSync(function(f) {
f.base = options.base;
return f;
});
return watcher
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
.pipe(filter(pattern))
.pipe(es.map(function(file, cb) {
fs.stat(file.path, function(err, stat) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
if (!stat.isFile()) { return cb(); }
fs.readFile(file.path, function(err, contents) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
file.contents = contents;
file.stat = stat;
cb(null, file);
});
});
}))
.pipe(rebase);
};

108
build/lib/watch/watch-win32.js Normal file
View File

@@ -0,0 +1,108 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var path = require('path');
var cp = require('child_process');
var fs = require('fs');
var File = require('vinyl');
var es = require('event-stream');
var filter = require('gulp-filter');
var watcherPath = path.join(__dirname, 'watcher.exe');
function toChangeType(type) {
switch (type) {
case '0': return 'change';
case '1': return 'add';
default: return 'unlink';
}
}
function watch(root) {
var result = es.through();
var child = cp.spawn(watcherPath, [root]);
child.stdout.on('data', function(data) {
var lines = data.toString('utf8').split('\n');
for (var i = 0; i < lines.length; i++) {
var line = lines[i].trim();
if (line.length === 0) {
continue;
}
var changeType = line[0];
var changePath = line.substr(2);
// filter as early as possible
if (/^\.git/.test(changePath) || /(^|\\)out($|\\)/.test(changePath)) {
continue;
}
var changePathFull = path.join(root, changePath);
var file = new File({
path: changePathFull,
base: root
});
file.event = toChangeType(changeType);
result.emit('data', file);
}
});
child.stderr.on('data', function(data) {
result.emit('error', data);
});
child.on('exit', function(code) {
result.emit('error', 'Watcher died with code ' + code);
child = null;
});
process.once('SIGTERM', function () { process.exit(0); });
process.once('SIGTERM', function () { process.exit(0); });
process.once('exit', function () { child && child.kill(); });
return result;
}
var cache = Object.create(null);
module.exports = function(pattern, options) {
options = options || {};
var cwd = path.normalize(options.cwd || process.cwd());
var watcher = cache[cwd];
if (!watcher) {
watcher = cache[cwd] = watch(cwd);
}
var rebase = !options.base ? es.through() : es.mapSync(function (f) {
f.base = options.base;
return f;
});
return watcher
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
.pipe(filter(pattern))
.pipe(es.map(function (file, cb) {
fs.stat(file.path, function (err, stat) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
if (!stat.isFile()) { return cb(); }
fs.readFile(file.path, function (err, contents) {
if (err && err.code === 'ENOENT') { return cb(null, file); }
if (err) { return cb(); }
file.contents = contents;
file.stat = stat;
cb(null, file);
});
});
}))
.pipe(rebase);
};

BIN
build/lib/watch/watcher.exe Normal file

Binary file not shown.

View File

@@ -0,0 +1,14 @@
# monaco-editor-core
> This npm module is a building block for the [monaco-editor](https://www.npmjs.com/package/monaco-editor)
npm module, and unless you are doing something special (e.g. authoring a monaco editor language that can be shipped
and consumed independently), it is best to consume the [monaco-editor](https://www.npmjs.com/package/monaco-editor) module
that contains this module and adds language support.
The Monaco Editor is the code editor that powers [VS Code](https://github.com/Microsoft/vscode);
a good page describing the code editor's features is [here](https://code.visualstudio.com/docs/editor/editingevolved).
This npm module contains the core editor functionality, as it comes from the [vscode repository](https://github.com/Microsoft/vscode).
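For consumers of the package, a minimal browser-side sketch looks roughly like the following (illustrative only: it assumes the AMD loader shipped as `vs/loader.js` is already on the page, that the package is served from `node_modules/monaco-editor-core/min`, and that a `#container` element exists):
```js
// Illustrative sketch only -- the paths and element id are assumptions, not part of this package's docs.
require.config({ paths: { 'vs': 'node_modules/monaco-editor-core/min/vs' } });
require(['vs/editor/editor.main'], function () {
	// core editor only: richer language support comes from the monaco-editor package
	monaco.editor.create(document.getElementById('container'), {
		value: 'some text',
		language: 'plaintext'
	});
});
```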
## License
[Source EULA](https://github.com/Microsoft/sqlopsstudio/blob/master/LICENSE.txt)

20
build/monaco/README.md Normal file
View File

@@ -0,0 +1,20 @@
# Steps to publish a new version of monaco-editor-core
## Generate monaco.d.ts
* The `monaco.d.ts` is now automatically generated when running `gulp watch`
## Bump version
* Increase the version in `build/monaco/package.json`
## Generate npm contents for monaco-editor-core
* Be sure to have all changes committed **and pushed to the remote**
* (the generated files contain the HEAD sha and that should be available on the remote)
* Run `gulp editor-distro`
## Publish
* `cd out-monaco-editor-core`
* `npm publish`

85
build/monaco/ThirdPartyNotices.txt Normal file
View File

@@ -0,0 +1,85 @@
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
Do Not Translate or Localize
This project incorporates components from the projects listed below. The original copyright notices and the licenses
under which Microsoft received such components are set forth below. Microsoft reserves all rights not expressly granted
herein, whether by implication, estoppel or otherwise.
%% winjs version 4.4.0 (https://github.com/winjs/winjs)
=========================================
WinJS
Copyright (c) Microsoft Corporation
All rights reserved.
Source EULA
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the ""Software""), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF winjs NOTICES AND INFORMATION
%% string_scorer version 0.1.20 (https://github.com/joshaven/string_score)
=========================================
This software is released under the Source EULA:
Copyright (c) Joshaven Potter
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF string_scorer NOTICES AND INFORMATION
%% chjj-marked NOTICES AND INFORMATION BEGIN HERE
=========================================
The Source EULA License
Copyright (c) 2011-2014, Christopher Jeffrey (https://github.com/chjj/)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================
END OF chjj-marked NOTICES AND INFORMATION

327
build/monaco/api.js Normal file
View File

@@ -0,0 +1,327 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
var fs = require("fs");
var ts = require("typescript");
var path = require("path");
var tsfmt = require('../../tsfmt.json');
var util = require('gulp-util');
function log(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log.apply(util, [util.colors.cyan('[monaco.d.ts]'), message].concat(rest));
}
var SRC = path.join(__dirname, '../../src');
var OUT_ROOT = path.join(__dirname, '../../');
var RECIPE_PATH = path.join(__dirname, './monaco.d.ts.recipe');
var DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
var CURRENT_PROCESSING_RULE = '';
function logErr(message) {
var rest = [];
for (var _i = 1; _i < arguments.length; _i++) {
rest[_i - 1] = arguments[_i];
}
util.log(util.colors.red('[monaco.d.ts]'), 'WHILE HANDLING RULE: ', CURRENT_PROCESSING_RULE);
util.log.apply(util, [util.colors.red('[monaco.d.ts]'), message].concat(rest));
}
function moduleIdToPath(out, moduleId) {
if (/\.d\.ts/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(OUT_ROOT, out, moduleId) + '.d.ts';
}
var SOURCE_FILE_MAP = {};
function getSourceFile(out, inputFiles, moduleId) {
if (!SOURCE_FILE_MAP[moduleId]) {
var filePath = path.normalize(moduleIdToPath(out, moduleId));
if (!inputFiles.hasOwnProperty(filePath)) {
logErr('CANNOT FIND FILE ' + filePath + '. YOU MIGHT NEED TO RESTART gulp');
return null;
}
var fileContents = inputFiles[filePath];
var sourceFile = ts.createSourceFile(filePath, fileContents, ts.ScriptTarget.ES5);
SOURCE_FILE_MAP[moduleId] = sourceFile;
}
return SOURCE_FILE_MAP[moduleId];
}
function isDeclaration(a) {
return (a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
|| a.kind === ts.SyntaxKind.ClassDeclaration
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|| a.kind === ts.SyntaxKind.ModuleDeclaration);
}
function visitTopLevelDeclarations(sourceFile, visitor) {
var stop = false;
var visit = function (node) {
if (stop) {
return;
}
switch (node.kind) {
case ts.SyntaxKind.InterfaceDeclaration:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.VariableStatement:
case ts.SyntaxKind.TypeAliasDeclaration:
case ts.SyntaxKind.FunctionDeclaration:
case ts.SyntaxKind.ModuleDeclaration:
stop = visitor(node);
}
// if (node.kind !== ts.SyntaxKind.SourceFile) {
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('FOUND TEXT IN NODE: ' + ts.SyntaxKind[node.kind]);
// console.log(getNodeText(sourceFile, node));
// }
// }
if (stop) {
return;
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
}
function getAllTopLevelDeclarations(sourceFile) {
var all = [];
visitTopLevelDeclarations(sourceFile, function (node) {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
var interfaceDeclaration = node;
var triviaStart = interfaceDeclaration.pos;
var triviaEnd = interfaceDeclaration.name.pos;
var triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
// // let nodeText = getNodeText(sourceFile, node);
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('TRIVIA: ', triviaText);
// }
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
}
else {
var nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
}
return false /*continue*/;
});
return all;
}
function getTopLevelDeclaration(sourceFile, typeName) {
var result = null;
visitTopLevelDeclarations(sourceFile, function (node) {
if (isDeclaration(node)) {
if (node.name.text === typeName) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
}
// node is ts.VariableStatement
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
});
return result;
}
function getNodeText(sourceFile, node) {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function getMassagedTopLevelDeclarationText(sourceFile, declaration) {
var result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!');
// // console.log(ts.SyntaxKind[declaration.kind]);
// }
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
var interfaceDeclaration = declaration;
var members = interfaceDeclaration.members;
members.forEach(function (member) {
try {
var memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
// console.log('BEFORE: ', result);
result = result.replace(memberText, '');
// console.log('AFTER: ', result);
}
}
catch (err) {
// life..
}
});
}
result = result.replace(/export default/g, 'export');
result = result.replace(/export declare/g, 'export');
return result;
}
function format(text) {
// Parse the source text
var sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
var edits = ts.formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function getRuleProvider(options) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
var ruleProvider = new ts.formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text, edits) {
// Apply edits in reverse on the existing text
var result = text;
for (var i = edits.length - 1; i >= 0; i--) {
var change = edits[i];
var head = result.slice(0, change.span.start);
var tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
}
}
function createReplacer(data) {
data = data || '';
var rawDirectives = data.split(';');
var directives = [];
rawDirectives.forEach(function (rawDirective) {
if (rawDirective.length === 0) {
return;
}
var pieces = rawDirective.split('=>');
var findStr = pieces[0];
var replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return function (str) {
for (var i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
return str;
};
}
function generateDeclarationFile(out, inputFiles, recipe) {
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
CURRENT_PROCESSING_RULE = line;
var moduleId = m1[1];
var sourceFile_1 = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile_1) {
return;
}
var replacer_1 = createReplacer(m1[2]);
var typeNames = m1[3].split(/,/);
typeNames.forEach(function (typeName) {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
var declaration = getTopLevelDeclaration(sourceFile_1, typeName);
if (!declaration) {
logErr('Cannot find type ' + typeName);
return;
}
result.push(replacer_1(getMassagedTopLevelDeclarationText(sourceFile_1, declaration)));
});
return;
}
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
CURRENT_PROCESSING_RULE = line;
var moduleId = m2[1];
var sourceFile_2 = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile_2) {
return;
}
var replacer_2 = createReplacer(m2[2]);
var typeNames = m2[3].split(/,/);
var typesToExcludeMap_1 = {};
var typesToExcludeArr_1 = [];
typeNames.forEach(function (typeName) {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
typesToExcludeMap_1[typeName] = true;
typesToExcludeArr_1.push(typeName);
});
getAllTopLevelDeclarations(sourceFile_2).forEach(function (declaration) {
if (isDeclaration(declaration)) {
if (typesToExcludeMap_1[declaration.name.text]) {
return;
}
}
else {
// node is ts.VariableStatement
var nodeText = getNodeText(sourceFile_2, declaration);
for (var i = 0; i < typesToExcludeArr_1.length; i++) {
if (nodeText.indexOf(typesToExcludeArr_1[i]) >= 0) {
return;
}
}
}
result.push(replacer_2(getMassagedTopLevelDeclarationText(sourceFile_2, declaration)));
});
return;
}
result.push(line);
});
var resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
function getFilesToWatch(out) {
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var lines = recipe.split(/\r\n|\n|\r/);
var result = [];
lines.forEach(function (line) {
var m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
var moduleId = m1[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
var m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
var moduleId = m2[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
});
return result;
}
exports.getFilesToWatch = getFilesToWatch;
function run(out, inputFiles) {
log('Starting monaco.d.ts generation');
SOURCE_FILE_MAP = {};
var recipe = fs.readFileSync(RECIPE_PATH).toString();
var result = generateDeclarationFile(out, inputFiles, recipe);
var currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
return {
content: result,
filePath: DECLARATION_PATH,
isTheSame: currentContent === result
};
}
exports.run = run;
function complainErrors() {
logErr('Not running monaco.d.ts generation due to compile errors');
}
exports.complainErrors = complainErrors;

382
build/monaco/api.ts Normal file
View File

@@ -0,0 +1,382 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import fs = require('fs');
import ts = require('typescript');
import path = require('path');
const tsfmt = require('../../tsfmt.json');
var util = require('gulp-util');
function log(message: any, ...rest: any[]): void {
util.log(util.colors.cyan('[monaco.d.ts]'), message, ...rest);
}
const SRC = path.join(__dirname, '../../src');
const OUT_ROOT = path.join(__dirname, '../../');
const RECIPE_PATH = path.join(__dirname, './monaco.d.ts.recipe');
const DECLARATION_PATH = path.join(__dirname, '../../src/vs/monaco.d.ts');
var CURRENT_PROCESSING_RULE = '';
function logErr(message: any, ...rest: any[]): void {
util.log(util.colors.red('[monaco.d.ts]'), 'WHILE HANDLING RULE: ', CURRENT_PROCESSING_RULE);
util.log(util.colors.red('[monaco.d.ts]'), message, ...rest);
}
function moduleIdToPath(out:string, moduleId:string): string {
if (/\.d\.ts/.test(moduleId)) {
return path.join(SRC, moduleId);
}
return path.join(OUT_ROOT, out, moduleId) + '.d.ts';
}
let SOURCE_FILE_MAP: {[moduleId:string]:ts.SourceFile;} = {};
function getSourceFile(out:string, inputFiles: { [file: string]: string; }, moduleId:string): ts.SourceFile {
if (!SOURCE_FILE_MAP[moduleId]) {
let filePath = path.normalize(moduleIdToPath(out, moduleId));
if (!inputFiles.hasOwnProperty(filePath)) {
logErr('CANNOT FIND FILE ' + filePath + '. YOU MIGHT NEED TO RESTART gulp');
return null;
}
let fileContents = inputFiles[filePath];
let sourceFile = ts.createSourceFile(filePath, fileContents, ts.ScriptTarget.ES5);
SOURCE_FILE_MAP[moduleId] = sourceFile;
}
return SOURCE_FILE_MAP[moduleId];
}
type TSTopLevelDeclaration = ts.InterfaceDeclaration | ts.EnumDeclaration | ts.ClassDeclaration | ts.TypeAliasDeclaration | ts.FunctionDeclaration | ts.ModuleDeclaration;
type TSTopLevelDeclare = TSTopLevelDeclaration | ts.VariableStatement;
function isDeclaration(a:TSTopLevelDeclare): a is TSTopLevelDeclaration {
return (
a.kind === ts.SyntaxKind.InterfaceDeclaration
|| a.kind === ts.SyntaxKind.EnumDeclaration
|| a.kind === ts.SyntaxKind.ClassDeclaration
|| a.kind === ts.SyntaxKind.TypeAliasDeclaration
|| a.kind === ts.SyntaxKind.FunctionDeclaration
|| a.kind === ts.SyntaxKind.ModuleDeclaration
);
}
function visitTopLevelDeclarations(sourceFile:ts.SourceFile, visitor:(node:TSTopLevelDeclare)=>boolean): void {
let stop = false;
let visit = (node: ts.Node): void => {
if (stop) {
return;
}
switch (node.kind) {
case ts.SyntaxKind.InterfaceDeclaration:
case ts.SyntaxKind.EnumDeclaration:
case ts.SyntaxKind.ClassDeclaration:
case ts.SyntaxKind.VariableStatement:
case ts.SyntaxKind.TypeAliasDeclaration:
case ts.SyntaxKind.FunctionDeclaration:
case ts.SyntaxKind.ModuleDeclaration:
stop = visitor(<TSTopLevelDeclare>node);
}
// if (node.kind !== ts.SyntaxKind.SourceFile) {
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('FOUND TEXT IN NODE: ' + ts.SyntaxKind[node.kind]);
// console.log(getNodeText(sourceFile, node));
// }
// }
if (stop) {
return;
}
ts.forEachChild(node, visit);
};
visit(sourceFile);
}
function getAllTopLevelDeclarations(sourceFile:ts.SourceFile): TSTopLevelDeclare[] {
let all:TSTopLevelDeclare[] = [];
visitTopLevelDeclarations(sourceFile, (node) => {
if (node.kind === ts.SyntaxKind.InterfaceDeclaration || node.kind === ts.SyntaxKind.ClassDeclaration || node.kind === ts.SyntaxKind.ModuleDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration>node;
let triviaStart = interfaceDeclaration.pos;
let triviaEnd = interfaceDeclaration.name.pos;
let triviaText = getNodeText(sourceFile, { pos: triviaStart, end: triviaEnd });
// // let nodeText = getNodeText(sourceFile, node);
// if (getNodeText(sourceFile, node).indexOf('SymbolKind') >= 0) {
// console.log('TRIVIA: ', triviaText);
// }
if (triviaText.indexOf('@internal') === -1) {
all.push(node);
}
} else {
let nodeText = getNodeText(sourceFile, node);
if (nodeText.indexOf('@internal') === -1) {
all.push(node);
}
}
return false /*continue*/;
});
return all;
}
function getTopLevelDeclaration(sourceFile:ts.SourceFile, typeName:string): TSTopLevelDeclare {
let result:TSTopLevelDeclare = null;
visitTopLevelDeclarations(sourceFile, (node) => {
if (isDeclaration(node)) {
if (node.name.text === typeName) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
}
// node is ts.VariableStatement
if (getNodeText(sourceFile, node).indexOf(typeName) >= 0) {
result = node;
return true /*stop*/;
}
return false /*continue*/;
});
return result;
}
function getNodeText(sourceFile:ts.SourceFile, node:{pos:number; end:number;}): string {
return sourceFile.getFullText().substring(node.pos, node.end);
}
function getMassagedTopLevelDeclarationText(sourceFile:ts.SourceFile, declaration: TSTopLevelDeclare): string {
let result = getNodeText(sourceFile, declaration);
// if (result.indexOf('MonacoWorker') >= 0) {
// console.log('here!');
// // console.log(ts.SyntaxKind[declaration.kind]);
// }
if (declaration.kind === ts.SyntaxKind.InterfaceDeclaration || declaration.kind === ts.SyntaxKind.ClassDeclaration) {
let interfaceDeclaration = <ts.InterfaceDeclaration | ts.ClassDeclaration>declaration;
let members:ts.NodeArray<ts.Node> = interfaceDeclaration.members;
members.forEach((member) => {
try {
let memberText = getNodeText(sourceFile, member);
if (memberText.indexOf('@internal') >= 0 || memberText.indexOf('private') >= 0) {
// console.log('BEFORE: ', result);
result = result.replace(memberText, '');
// console.log('AFTER: ', result);
}
} catch (err) {
// life..
}
});
}
result = result.replace(/export default/g, 'export');
result = result.replace(/export declare/g, 'export');
return result;
}
function format(text:string): string {
// Parse the source text
let sourceFile = ts.createSourceFile('file.ts', text, ts.ScriptTarget.Latest, /*setParentPointers*/ true);
// Get the formatting edits on the input sources
let edits = (<any>ts).formatting.formatDocument(sourceFile, getRuleProvider(tsfmt), tsfmt);
// Apply the edits on the input code
return applyEdits(text, edits);
function getRuleProvider(options: ts.FormatCodeSettings) {
// Share this between multiple formatters using the same options.
// This represents the bulk of the space the formatter uses.
let ruleProvider = new (<any>ts).formatting.RulesProvider();
ruleProvider.ensureUpToDate(options);
return ruleProvider;
}
function applyEdits(text: string, edits: ts.TextChange[]): string {
// Apply edits in reverse on the existing text
let result = text;
for (let i = edits.length - 1; i >= 0; i--) {
let change = edits[i];
let head = result.slice(0, change.span.start);
let tail = result.slice(change.span.start + change.span.length);
result = head + change.newText + tail;
}
return result;
}
}
function createReplacer(data:string): (str:string)=>string {
data = data || '';
let rawDirectives = data.split(';');
let directives: [RegExp,string][] = [];
rawDirectives.forEach((rawDirective) => {
if (rawDirective.length === 0) {
return;
}
let pieces = rawDirective.split('=>');
let findStr = pieces[0];
let replaceStr = pieces[1];
findStr = findStr.replace(/[\-\\\{\}\*\+\?\|\^\$\.\,\[\]\(\)\#\s]/g, '\\$&');
findStr = '\\b' + findStr + '\\b';
directives.push([new RegExp(findStr, 'g'), replaceStr]);
});
return (str:string)=> {
for (let i = 0; i < directives.length; i++) {
str = str.replace(directives[i][0], directives[i][1]);
}
return str;
};
}
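/**
 * Expands the recipe into the final monaco.d.ts text: #include copies the named
 * top-level declarations out of a module's declaration file, #includeAll copies
 * everything except the listed types, and the optional ';find=>replace' part of
 * a directive rewrites identifiers in the copied text. URI, Event and TPromise
 * are renamed globally and the result is run through the TypeScript formatter.
 */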
function generateDeclarationFile(out: string, inputFiles: { [file: string]: string; }, recipe:string): string {
let lines = recipe.split(/\r\n|\n|\r/);
let result = [];
lines.forEach(line => {
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
CURRENT_PROCESSING_RULE = line;
let moduleId = m1[1];
let sourceFile = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile) {
return;
}
let replacer = createReplacer(m1[2]);
let typeNames = m1[3].split(/,/);
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
let declaration = getTopLevelDeclaration(sourceFile, typeName);
if (!declaration) {
logErr('Cannot find type ' + typeName);
return;
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration)));
});
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
CURRENT_PROCESSING_RULE = line;
let moduleId = m2[1];
let sourceFile = getSourceFile(out, inputFiles, moduleId);
if (!sourceFile) {
return;
}
let replacer = createReplacer(m2[2]);
let typeNames = m2[3].split(/,/);
let typesToExcludeMap: {[typeName:string]:boolean;} = {};
let typesToExcludeArr: string[] = [];
typeNames.forEach((typeName) => {
typeName = typeName.trim();
if (typeName.length === 0) {
return;
}
typesToExcludeMap[typeName] = true;
typesToExcludeArr.push(typeName);
});
getAllTopLevelDeclarations(sourceFile).forEach((declaration) => {
if (isDeclaration(declaration)) {
if (typesToExcludeMap[declaration.name.text]) {
return;
}
} else {
// node is ts.VariableStatement
let nodeText = getNodeText(sourceFile, declaration);
for (let i = 0; i < typesToExcludeArr.length; i++) {
if (nodeText.indexOf(typesToExcludeArr[i]) >= 0) {
return;
}
}
}
result.push(replacer(getMassagedTopLevelDeclarationText(sourceFile, declaration)));
});
return;
}
result.push(line);
});
let resultTxt = result.join('\n');
resultTxt = resultTxt.replace(/\bURI\b/g, 'Uri');
resultTxt = resultTxt.replace(/\bEvent</g, 'IEvent<');
resultTxt = resultTxt.replace(/\bTPromise</g, 'Promise<');
resultTxt = format(resultTxt);
resultTxt = resultTxt.replace(/\r\n/g, '\n');
return resultTxt;
}
export function getFilesToWatch(out:string): string[] {
let recipe = fs.readFileSync(RECIPE_PATH).toString();
let lines = recipe.split(/\r\n|\n|\r/);
let result = [];
lines.forEach(line => {
let m1 = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m1) {
let moduleId = m1[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
let m2 = line.match(/^\s*#includeAll\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
if (m2) {
let moduleId = m2[1];
result.push(moduleIdToPath(out, moduleId));
return;
}
});
return result;
}
export interface IMonacoDeclarationResult {
content: string;
filePath: string;
isTheSame: boolean;
}
export function run(out: string, inputFiles: { [file: string]: string; }): IMonacoDeclarationResult {
log('Starting monaco.d.ts generation');
SOURCE_FILE_MAP = {};
let recipe = fs.readFileSync(RECIPE_PATH).toString();
let result = generateDeclarationFile(out, inputFiles, recipe);
let currentContent = fs.readFileSync(DECLARATION_PATH).toString();
log('Finished monaco.d.ts generation');
return {
content: result,
filePath: DECLARATION_PATH,
isTheSame: currentContent === result
};
}
export function complainErrors() {
logErr('Not running monaco.d.ts generation due to compile errors');
}

87
build/monaco/monaco.d.ts.recipe Normal file
View File

@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
declare module monaco {
type Thenable<T> = PromiseLike<T>;
export interface IDisposable {
dispose(): void;
}
export interface IEvent<T> {
(listener: (e: T) => any, thisArg?: any): IDisposable;
}
/**
* A helper that allows to emit and listen to typed events
*/
export class Emitter<T> {
constructor();
readonly event: Event<T>;
fire(event?: T): void;
dispose(): void;
}
export enum Severity {
Ignore = 0,
Info = 1,
Warning = 2,
Error = 3,
}
#include(vs/base/common/winjs.base.d.ts): TValueCallback, ProgressCallback, Promise
#include(vs/base/common/cancellation): CancellationTokenSource, CancellationToken
#include(vs/base/common/uri): URI
#include(vs/editor/common/standalone/standaloneBase): KeyCode, KeyMod
#include(vs/base/common/htmlContent): IMarkdownString
#include(vs/base/browser/keyboardEvent): IKeyboardEvent
#include(vs/base/browser/mouseEvent): IMouseEvent
#include(vs/editor/common/editorCommon): IScrollEvent
#include(vs/editor/common/core/position): IPosition, Position
#include(vs/editor/common/core/range): IRange, Range
#include(vs/editor/common/core/selection): ISelection, Selection, SelectionDirection
#include(vs/editor/common/core/token): Token
}
declare module monaco.editor {
#includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>):
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
#include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
#include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions
#include(vs/editor/standalone/browser/standaloneCodeEditor): IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
export interface ICommandHandler {
(...args:any[]): void;
}
#include(vs/platform/contextkey/common/contextkey): IContextKey
#include(vs/editor/standalone/browser/standaloneServices): IEditorOverrideServices
#include(vs/platform/markers/common/markers): IMarker, IMarkerData
#include(vs/editor/standalone/browser/colorizer): IColorizerOptions, IColorizerElementOptions
#include(vs/base/common/scrollable): ScrollbarVisibility
#include(vs/platform/theme/common/themeService): ThemeColor
#includeAll(vs/editor/common/editorCommon;IMode=>languages.IMode;LanguageIdentifier=>languages.LanguageIdentifier;editorOptions.=>): ISelection, IScrollEvent
#includeAll(vs/editor/common/model/textModelEvents):
#includeAll(vs/editor/common/controller/cursorEvents):
#includeAll(vs/editor/common/config/editorOptions):
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo
}
declare module monaco.languages {
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
#includeAll(vs/editor/common/modes/languageConfiguration):
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.):
#include(vs/editor/common/services/modeService): ILanguageExtensionPoint
#includeAll(vs/editor/standalone/common/monarch/monarchTypes):
}
declare module monaco.worker {
#includeAll(vs/editor/common/services/editorSimpleWorker;):
}
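For reference, a standalone sketch of how one of the directives above is decomposed by the #include regex used in build/monaco/api.ts (captured groups shown in the comments):
var line = '#include(vs/base/common/uri): URI';
var m = line.match(/^\s*#include\(([^;)]*)(;[^)]*)?\)\:(.*)$/);
console.log(m[1]); // 'vs/base/common/uri' -> module whose declaration file is read
console.log(m[2]); // undefined            -> optional ';find=>replace' rename directives
console.log(m[3]); // ' URI'               -> comma-separated type names to copy out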

57
build/monaco/package.json Normal file
View File

@@ -0,0 +1,57 @@
{
"name": "monaco-editor-core",
"private": true,
"version": "0.9.0",
"description": "A browser based code editor",
"author": "Microsoft Corporation",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/Microsoft/vscode"
},
"bugs": {
"url": "https://github.com/Microsoft/vscode/issues"
},
"devDependencies": {
"@types/minimist": "^1.2.0",
"@types/mocha": "^2.2.39",
"@types/semver": "^5.3.30",
"@types/sinon": "^1.16.34",
"debounce": "^1.0.0",
"eslint": "^3.4.0",
"event-stream": "^3.1.7",
"ghooks": "1.0.3",
"glob": "^5.0.13",
"gulp": "^3.8.9",
"gulp-bom": "^1.0.0",
"gulp-concat": "^2.6.0",
"gulp-cssnano": "^2.1.0",
"gulp-filter": "^3.0.0",
"gulp-flatmap": "^1.0.0",
"gulp-rename": "^1.2.0",
"gulp-sourcemaps": "^1.11.0",
"gulp-tsb": "^2.0.3",
"gulp-tslint": "^7.0.1",
"gulp-uglify": "^2.0.0",
"gulp-util": "^3.0.6",
"gulp-watch": "^4.3.9",
"is": "^3.1.0",
"istanbul": "^0.3.17",
"jsdom-no-contextify": "^3.1.0",
"lazy.js": "^0.4.2",
"minimatch": "^2.0.10",
"mocha": "^2.2.5",
"object-assign": "^4.0.1",
"pump": "^1.0.1",
"remap-istanbul": "^0.6.4",
"rimraf": "^2.2.8",
"sinon": "^1.17.2",
"source-map": "^0.4.4",
"tslint": "^4.3.1",
"typescript": "2.4.1",
"typescript-formatter": "4.0.1",
"underscore": "^1.8.2",
"vinyl": "^0.4.5",
"vscode-nls-dev": "^2.0.1"
}
}

62
build/npm/postinstall.js Normal file
View File

@@ -0,0 +1,62 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const path = require('path');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function npmInstall(location, opts) {
opts = opts || {};
opts.cwd = location;
opts.stdio = 'inherit';
const result = cp.spawnSync(npm, ['install'], opts);
if (result.error || result.status !== 0) {
process.exit(1);
}
}
const protocol = [
'jsonrpc',
'types',
'client'
];
protocol.forEach(item => npmInstall(`dataprotocol-node/${item}`));
// {{SQL CARBON EDIT}}
npmInstall('extensions-modules');
npmInstall('extensions'); // node modules shared by all extensions
const extensions = [
'vscode-colorize-tests',
'git',
'json',
'mssql',
'configuration-editing',
'extension-editing',
'markdown',
'merge-conflict',
'account-provider-azure',
'insights-default'
];
extensions.forEach(extension => npmInstall(`extensions/${extension}`));
function npmInstallBuildDependencies() {
// make sure we install gulp watch for the system installed
// node, since that is the driver of gulp
const env = Object.assign({}, process.env);
delete env['npm_config_disturl'];
delete env['npm_config_target'];
delete env['npm_config_runtime'];
npmInstall(path.join(path.dirname(__dirname), 'lib', 'watch'), { env });
}
npmInstall(`build`); // node modules required for build
npmInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron

15
build/npm/preinstall.js Normal file
View File

@@ -0,0 +1,15 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
if (process.env['npm_config_disturl'] !== 'https://atom.io/download/electron') {
console.error("You can't use plain npm to install Code's dependencies.");
console.error(
/^win/.test(process.platform)
? "Please run '.\\scripts\\npm.bat install' instead."
: "Please run './scripts/npm.sh install' instead."
);
process.exit(1);
}

73
build/npm/update-all-grammars.js Normal file
View File

@@ -0,0 +1,73 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
function updateGrammar(location) {
const result = cp.spawnSync(npm, ['run', 'update-grammar'], {
cwd: location,
stdio: 'inherit'
});
if (result.error || result.status !== 0) {
process.exit(1);
}
}
const extensions = [
// 'bat' Grammar no longer available
'clojure',
'coffeescript',
'cpp',
'csharp',
'css',
'diff',
'docker',
'fsharp',
'gitsyntax',
'go',
'groovy',
'handlebars',
'hlsl',
'html',
'ini',
'java',
// 'javascript', updated through JavaScript
'json',
'less',
'lua',
'make',
'markdown',
'objective-c',
'perl',
'php',
// 'powershell', grammar not ready yet, @daviwil will ping when ready
'pug',
'python',
'r',
'razor',
'ruby',
'rust',
'scss',
'shaderlab',
'shellscript',
// 'sql', customized, PRs pending
'swift',
'typescript',
'vb',
'xml',
'yaml'
];
extensions.forEach(extension => updateGrammar(`extensions/${extension}`));
// run integration tests
if (process.platform === 'win32') {
cp.spawn('.\\scripts\\test-integration.bat', [], { env: process.env, stdio: 'inherit' });
} else {
cp.spawn('/bin/bash', ['./scripts/test-integration.sh'], { env: process.env, stdio: 'inherit' });
}

125
build/npm/update-grammar.js Normal file
View File

@@ -0,0 +1,125 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var path = require('path');
var fs = require('fs');
var plist = require('fast-plist');
var cson = require('cson-parser');
var https = require('https');
var url = require('url');
function getOptions(urlString) {
var _url = url.parse(urlString);
return {
protocol: _url.protocol,
host: _url.host,
port: _url.port,
path: _url.path,
headers: {
'User-Agent': 'NodeJS'
}
}
}
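// Fetches a URL over https and resolves with the response body as a string,
// following redirect responses to their Location target.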
function download(url, redirectCount) {
return new Promise((c, e) => {
var content = '';
https.get(getOptions(url), function (response) {
response.on('data', function (data) {
content += data.toString();
}).on('end', function () {
let count = redirectCount || 0;
if (count < 5 && response.statusCode >= 300 && response.statusCode <= 303 || response.statusCode === 307) {
let location = response.headers['location'];
if (location) {
console.log("Redirected " + url + " to " + location);
download(location, count+1).then(c, e);
return;
}
}
c(content);
});
}).on('error', function (err) {
e(err.message);
});
});
}
function getCommitSha(repoId, repoPath) {
var commitInfo = 'https://api.github.com/repos/' + repoId + '/commits?path=' + repoPath;
return download(commitInfo).then(function (content) {
try {
let lastCommit = JSON.parse(content)[0];
return Promise.resolve({
commitSha: lastCommit.sha,
commitDate: lastCommit.commit.author.date
});
} catch (e) {
console.error("Failed extracting the SHA: " + content);
return Promise.resolve(null);
}
}, function () {
console.error('Failed loading ' + commitInfo);
return Promise.resolve(null);
});
}
exports.update = function (repoId, repoPath, dest, modifyGrammar) {
var contentPath = 'https://raw.githubusercontent.com/' + repoId + '/master/' + repoPath;
console.log('Reading from ' + contentPath);
return download(contentPath).then(function (content) {
var ext = path.extname(repoPath);
var grammar;
if (ext === '.tmLanguage' || ext === '.plist') {
grammar = plist.parse(content);
} else if (ext === '.cson') {
grammar = cson.parse(content);
} else if (ext === '.json') {
grammar = JSON.parse(content);
} else {
console.error('Unknown file extension: ' + ext);
return;
}
if (modifyGrammar) {
modifyGrammar(grammar);
}
return getCommitSha(repoId, repoPath).then(function (info) {
let result = {
information_for_contributors: [
'This file has been converted from https://github.com/' + repoId + '/blob/master/' + repoPath,
'If you want to provide a fix or improvement, please create a pull request against the original repository.',
'Once accepted there, we are happy to receive an update request.'
]
};
if (info) {
result.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha;
}
for (let key in grammar) {
result[key] = grammar[key];
}
try {
fs.writeFileSync(dest, JSON.stringify(result, null, '\t'));
if (info) {
console.log('Updated ' + path.basename(dest) + ' to ' + repoId + '@' + info.commitSha.substr(0, 7) + ' (' + info.commitDate.substr(0, 10) + ')');
} else {
console.log('Updated ' + path.basename(dest));
}
} catch (e) {
console.error(e);
}
});
}, console.error);
}
if (path.basename(process.argv[1]) === 'update-grammar.js') {
for (var i = 3; i < process.argv.length; i += 2) {
exports.update(process.argv[2], process.argv[i], process.argv[i + 1]);
}
}

82
build/npm/update-theme.js Normal file
View File

@@ -0,0 +1,82 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
var path = require('path');
var fs = require('fs');
var plist = require('fast-plist');
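// Maps the global "settings" keys of a .tmTheme file to the corresponding
// VS Code editor/workbench color identifiers used in the generated JSON theme.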
var mappings = {
"background": ["editor.background"],
"foreground": ["editor.foreground"],
"hoverHighlight": ["editor.hoverHighlightBackground"],
"linkForeground": ["editorLink.foreground"],
"selection": ["editor.selectionBackground"],
"inactiveSelection": ["editor.inactiveSelectionBackground"],
"selectionHighlightColor": ["editor.selectionHighlightBackground"],
"wordHighlight": ["editor.wordHighlightBackground"],
"wordHighlightStrong": ["editor.wordHighlightStrongBackground"],
"findMatchHighlight": ["editor.findMatchHighlightBackground", "peekViewResult.matchHighlightBackground"],
"currentFindMatchHighlight": ["editor.findMatchBackground"],
"findRangeHighlight": ["editor.findRangeHighlightBackground"],
"referenceHighlight": ["peekViewEditor.matchHighlightBackground"],
"lineHighlight": ["editor.lineHighlightBackground"],
"rangeHighlight": ["editor.rangeHighlightBackground"],
"caret": ["editorCursor.foreground"],
"invisibles": ["editorWhitespace.foreground"],
"guide": ["editorIndentGuide.background"],
"ansiBlack": ["terminal.ansiBlack"], "ansiRed": ["terminal.ansiRed"], "ansiGreen": ["terminal.ansiGreen"], "ansiYellow": ["terminal.ansiYellow"],
"ansiBlue": ["terminal.ansiBlue"], "ansiMagenta": ["terminal.ansiMagenta"], "ansiCyan": ["terminal.ansiCyan"], "ansiWhite": ["terminal.ansiWhite"],
"ansiBrightBlack": ["terminal.ansiBrightBlack"], "ansiBrightRed": ["terminal.ansiBrightRed"], "ansiBrightGreen": ["terminal.ansiBrightGreen"],
"ansiBrightYellow": ["terminal.ansiBrightYellow"], "ansiBrightBlue": ["terminal.ansiBrightBlue"], "ansiBrightMagenta": ["terminal.ansiBrightMagenta"],
"ansiBrightCyan": ["terminal.ansiBrightCyan"], "ansiBrightWhite": ["terminal.ansiBrightWhite"]
};
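// Maps tmTheme global settings keys to the corresponding VS Code color identifiers.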
exports.update = function (srcName, destName) {
try {
console.log('reading ', srcName);
let result = {};
let plistContent = fs.readFileSync(srcName).toString();
let theme = plist.parse(plistContent);
let settings = theme.settings;
if (Array.isArray(settings)) {
let colorMap = {};
for (let entry of settings) {
let scope = entry.scope;
if (scope) {
let parts = scope.split(',').map(p => p.trim());
if (parts.length > 1) {
entry.scope = parts;
}
} else {
var entrySettings = entry.settings;
for (let entry in entrySettings) {
let mapping = mappings[entry];
if (mapping) {
for (let newKey of mapping) {
colorMap[newKey] = entrySettings[entry];
}
if (entry !== 'foreground' && entry !== 'background') {
delete entrySettings[entry];
}
}
}
}
}
result.name = theme.name;
result.tokenColors = settings;
result.colors = colorMap;
}
fs.writeFileSync(destName, JSON.stringify(result, null, '\t'));
} catch (e) {
console.log(e);
}
};
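// CLI: node update-theme.js <source .tmTheme/plist> <dest .json>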
if (path.basename(process.argv[1]) === 'update-theme.js') {
exports.update(process.argv[2], process.argv[3]);
}

28
build/package.json Normal file
View File

@@ -0,0 +1,28 @@
{
"name": "code-oss-dev-build",
"version": "1.0.0",
"devDependencies": {
"@types/azure": "^0.9.18",
"@types/documentdb": "^1.10.1",
"@types/es6-collections": "^0.5.30",
"@types/es6-promise": "0.0.32",
"@types/mime": "0.0.29",
"@types/node": "^7.0.13",
"@types/xml2js": "^0.0.33",
"azure-storage": "^2.1.0",
"decompress": "^4.2.0",
"documentdb": "^1.11.0",
"extensions-modules": "file:../extensions-modules",
"fs-extra-promise": "^1.0.1",
"mime": "^1.3.4",
"minimist": "^1.2.0",
"typescript": "2.4.1",
"vscode": "^1.0.1",
"xml2js": "^0.4.17"
},
"scripts": {
"compile": "tsc",
"watch": "tsc --watch",
"postinstall": "npm run compile"
}
}

2
build/tfs/common/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
node_modules/
*.js

View File

@@ -0,0 +1,40 @@
#!/bin/bash
set -e
# set agent specific npm cache
if [ -n "$AGENT_WORKFOLDER" ]
then
export npm_config_cache="$AGENT_WORKFOLDER/npm-cache"
echo "Using npm cache: $npm_config_cache"
fi
SUMMARY="Task;Duration"$'\n'
step() {
START=$SECONDS
TASK=$1; shift
echo ""
echo "*****************************************************************************"
echo "Start: $TASK"
echo "*****************************************************************************"
"$@"
# Calculate total duration
TOTAL=$(echo "$SECONDS - $START" | bc)
M=$(echo "$TOTAL / 60" | bc)
S=$(echo "$TOTAL % 60" | bc)
DURATION="$(printf "%02d" $M):$(printf "%02d" $S)"
echo "*****************************************************************************"
echo "End: $TASK, Total: $DURATION"
echo "*****************************************************************************"
SUMMARY="$SUMMARY$TASK;$DURATION"$'\n'
}
done_steps() {
echo ""
echo "Build Summary"
echo "============="
echo "${SUMMARY}" | column -t -s';'
}
trap done_steps EXIT

View File

@@ -0,0 +1,85 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { execSync } from 'child_process';
import { DocumentClient } from 'documentdb';
import * as azure from 'azure-storage';
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl: string;
hash: string;
}
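// Drop a "<quality>/<commit>" message onto the sign-darwin storage queue so the signing service picks up this build.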
function queueSigningRequest(quality: string, commit: string): Promise<void> {
const retryOperations = new azure.ExponentialRetryPolicyFilter();
const queueSvc = azure
.createQueueService(process.env['AZURE_STORAGE_ACCOUNT_2'], process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(retryOperations);
queueSvc.messageEncoder = new azure.QueueMessageEncoder.TextBase64QueueMessageEncoder();
const message = `${quality}/${commit}`;
return new Promise<void>((c, e) => queueSvc.createMessage('sign-darwin', message, err => err ? e(err) : c()));
}
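// A build counts as signed once its release document in DocumentDB contains a darwin archive asset.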
function isBuildSigned(quality: string, commit: string): Promise<boolean> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
return new Promise<boolean>((c, e) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return c(false); }
const [release] = results;
const assets: Asset[] = release.assets;
const isSigned = assets.some(a => a.platform === 'darwin' && a.type === 'archive');
c(isSigned);
});
});
}
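// Poll every 10 seconds, up to 180 attempts (~30 minutes), before giving up.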
async function waitForSignedBuild(quality: string, commit: string): Promise<void> {
let retries = 0;
while (retries < 180) {
if (await isBuildSigned(quality, commit)) {
return;
}
await new Promise<void>(c => setTimeout(c, 10000));
retries++;
}
throw new Error('Timed out waiting for signed build');
}
async function main(quality: string): Promise<void> {
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
console.log(`Queueing signing request for '${quality}/${commit}'...`);
await queueSigningRequest(quality, commit);
console.log('Waiting on signed build...');
await waitForSignedBuild(quality, commit);
console.log('Found signed build!');
}
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,26 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const cp = require('child_process');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
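// Run "npm install <package>" synchronously with inherited stdio; abort the build on any failure.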
function npmInstall(package: string, args: string[]): void {
const result = cp.spawnSync(npm, ['install', package, ...args], {
stdio: 'inherit'
});
if (result.error || result.status !== 0) {
process.exit(1);
}
}
const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };
const [, , ...args] = process.argv;
Object.keys(dependencies).forEach(name => {
const url = dependencies[name];
npmInstall(url, args);
});

15
build/tfs/common/node.sh Normal file
View File

@@ -0,0 +1,15 @@
#!/bin/bash
set -e
# setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then
export NVM_DIR=~/.nvm
source $(brew --prefix nvm)/nvm.sh
else
source $NVM_DIR/nvm.sh
fi
# install node
NODE_VERSION=7.10.0
nvm install $NODE_VERSION
nvm use $NODE_VERSION

266
build/tfs/common/publish.ts Normal file
View File

@@ -0,0 +1,266 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import { execSync } from 'child_process';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import * as minimist from 'minimist';
import { DocumentClient, NewDocument } from 'documentdb';
if (process.argv.length < 6) {
console.error('Usage: node publish.js <quality> <platform> <type> <name> <version> <is_update> <file>');
process.exit(-1);
}
function hashStream(hashName: string, stream: Readable): Promise<string> {
return new Promise<string>((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
interface Config {
id: string;
frozen: boolean;
}
function createDefaultConfig(quality: string): Config {
return {
id: quality,
frozen: false
};
}
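// Read the per-quality config document (e.g. whether releases are frozen); fall back to a default config when none exists.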
function getConfig(quality: string): Promise<Config> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise<Config>((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) { return e(err); }
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
});
});
}
interface Asset {
platform: string;
type: string;
url: string;
mooncakeUrl: string;
hash: string;
sha256hash: string;
}
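// Create the release document, or, if it already exists (409), merge the new asset into it, retrying the replace a few times on write conflicts.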
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function update(): Promise<void> {
updateTries++;
return new Promise<void>((c, e) => {
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
if (err) { return e(err); }
if (results.length !== 1) { return e(new Error('No documents')); }
const release = results[0];
release.assets = [
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
asset
];
if (isUpdate) {
release.updates[platform] = type;
}
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) { return c(update()); }
if (err) { return e(err); }
console.log('Build successfully updated.');
c();
});
});
});
}
return new Promise<void>((c, e) => {
client.createDocument(collection, release, err => {
if (err && err.code === 409) { return c(update()); }
if (err) { return e(err); }
console.log('Build successfully published.');
c();
});
});
}
async function assertContainer(blobService: azure.BlobService, quality: string): Promise<void> {
await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean> {
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}
interface PublishOptions {
'upload-only': boolean;
}
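// Hash the artifact, upload it to the primary and Mooncake (Azure China) blob stores, then create or update the release document in DocumentDB.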
async function publish(commit: string, quality: string, platform: string, type: string, name: string, version: string, _isUpdate: string, file: string, opts: PublishOptions): Promise<void> {
const isUpdate = _isUpdate === 'true';
const queuedBy = process.env['BUILD_QUEUEDBY'];
const sourceBranch = process.env['BUILD_SOURCEBRANCH'];
const isReleased = quality === 'insider'
&& /^master$|^refs\/heads\/master$/.test(sourceBranch)
&& /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy);
console.log('Publishing...');
console.log('Quality:', quality);
console.log('Platform:', platform);
console.log('Type:', type);
console.log('Name:', name);
console.log('Version:', version);
console.log('Commit:', commit);
console.log('Is Update:', isUpdate);
console.log('Is Released:', isReleased);
console.log('File:', file);
const stream = fs.createReadStream(file);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + name;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
await Promise.all([
assertContainer(blobService, quality),
assertContainer(mooncakeBlobService, quality)
]);
const [blobExists, mooncakeBlobExists] = await Promise.all([
doesAssetExist(blobService, quality, blobName),
doesAssetExist(mooncakeBlobService, quality, blobName)
]);
const promises = [];
if (!blobExists) {
promises.push(uploadBlob(blobService, quality, blobName, file));
}
if (!mooncakeBlobExists) {
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
}
if (promises.length === 0) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await Promise.all(promises);
console.log('Blobs successfully uploaded.');
const config = await getConfig(quality);
console.log('Quality config:', config);
const asset: Asset = {
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
mooncakeUrl: `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash
};
const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: config.frozen ? false : isReleased,
sourceBranch,
queuedBy,
assets: [],
updates: {} as any
};
if (!opts['upload-only']) {
release.assets.push(asset);
if (isUpdate) {
release.updates[platform] = type;
}
}
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}
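// Positional arguments: quality platform type name version is_update file; the commit is taken from "git rev-parse HEAD".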
function main(): void {
const opts = minimist<PublishOptions>(process.argv.slice(2), {
boolean: ['upload-only']
});
const [quality, platform, type, name, version, _isUpdate, file] = opts._;
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
console.error(err);
process.exit(1);
});
}
main();

41
build/tfs/darwin/build.sh Normal file
View File

@@ -0,0 +1,41 @@
#!/bin/sh
. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
export VSCODE_MIXIN_PASSWORD="$1"
export AZURE_STORAGE_ACCESS_KEY="$2"
export AZURE_STORAGE_ACCESS_KEY_2="$3"
export MOONCAKE_STORAGE_ACCESS_KEY="$4"
export AZURE_DOCUMENTDB_MASTERKEY="$5"
VSO_PAT="$6"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \
npm install
step "Hygiene" \
npm run gulp -- hygiene
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin
step "Install distro dependencies" \
node build/tfs/common/installDistro.js
step "Build minified & upload source maps" \
npm run gulp -- vscode-darwin-min upload-vscode-sourcemaps
# step "Create loader snapshot"
# node build/lib/snapshotLoader.js
step "Run unit tests" \
./scripts/test.sh --build --reporter dot
step "Run integration tests" \
./scripts/test-integration.sh
step "Publish release" \
./build/tfs/darwin/release.sh

View File

@@ -0,0 +1,26 @@
#!/bin/sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
step "Install build dependencies" \
npm i)
REPO=`pwd`
ZIP=$REPO/../VSCode-darwin-selfsigned.zip
UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip
BUILD=$REPO/../VSCode-darwin
PACKAGEJSON=`ls $BUILD/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
rm -rf $UNSIGNEDZIP
(cd $BUILD && \
step "Create unsigned archive" \
zip -r -X -y $UNSIGNEDZIP *)
step "Upload unsigned archive" \
node build/tfs/common/publish.js --upload-only $VSCODE_QUALITY darwin archive-unsigned VSCode-darwin-$VSCODE_QUALITY-unsigned.zip $VERSION false $UNSIGNEDZIP
step "Sign build" \
node build/tfs/common/enqueue.js $VSCODE_QUALITY

View File

@@ -0,0 +1,28 @@
#!/bin/sh
. ./build/tfs/common/node.sh
. ./scripts/env.sh
. ./build/tfs/common/common.sh
export VSCODE_MIXIN_PASSWORD="$1"
VSO_PAT="$2"
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
step "Install dependencies" \
npm install
step "Mix in repository from vscode-distro" \
npm run gulp -- mixin
step "Install distro dependencies" \
node build/tfs/common/installDistro.js
step "Build minified & upload source maps" \
npm run gulp -- vscode-darwin-min
step "Run smoke test" \
pushd test/smoke
npm install
npm test -- --latest "$AGENT_BUILDDIRECTORY/VSCode-darwin/Visual Studio Code - Insiders.app/Contents/MacOS/Electron"
popd

1
build/tfs/linux/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
pat

Some files were not shown because too many files have changed in this diff