Mirror of https://github.com/ckaczor/azuredatastudio.git
Synced 2026-02-18 02:51:36 -05:00

Compare commits
17 commits
Commits in this comparison (SHA1):
cacf0777c7
cb433fbeac
8f6736df5e
ef76d730e3
aabbeeffa9
7a513de543
abbd5ec037
84009f65ec
191648df0a
1265a56ff4
2171d44922
812f315e69
d48258a0fb
8ec78f67af
348b03327e
6497bbcc38
a1abca26df
.eslintrc — 19 lines (new file)
@@ -0,0 +1,19 @@
+{
+  "env": {
+    "node": true,
+    "es6": true
+  },
+  "rules": {
+    "no-console": 0,
+    "no-cond-assign": 0,
+    "no-unused-vars": 1,
+    "no-extra-semi": "warn",
+    "semi": "warn"
+  },
+  "extends": "eslint:recommended",
+  "parserOptions": {
+    "ecmaFeatures": {
+      "experimentalObjectRestSpread": true
+    }
+  }
+}
@@ -1,20 +0,0 @@
-{
-  "root": true,
-  "env": {
-    "node": true,
-    "es6": true
-  },
-  "rules": {
-    "no-console": 0,
-    "no-cond-assign": 0,
-    "no-unused-vars": 1,
-    "no-extra-semi": "warn",
-    "semi": "warn"
-  },
-  "extends": "eslint:recommended",
-  "parserOptions": {
-    "ecmaFeatures": {
-      "experimentalObjectRestSpread": true
-    }
-  }
-}
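For context, the two "warn"-level rules in the config above report semicolon problems without failing the lint run; a small JavaScript sketch (hypothetical code, not from the repo) of what each setting flags:

    // "semi": "warn" — reports the missing semicolon on the next line
    const port = 5870

    // "no-unused-vars": 1 — warns (does not error) about this unused binding
    const unused = port;

    // "no-extra-semi": "warn" — reports the stray semicolon after the block
    function start() {
        console.log('started'); // allowed: "no-console" is turned off (0)
    };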
.github/classifier.yml — 49 lines (vendored, deleted)
@@ -1,49 +0,0 @@
-{
-  perform: true,
-  alwaysRequireAssignee: false,
-  labelsRequiringAssignee: [],
-  autoAssignees: {
-    accessibility: [],
-    acquisition: [],
-    agent: [],
-    azure: [],
-    backup: [],
-    bcdr: [],
-    'chart viewer': [],
-    connection: [],
-    dacfx: [],
-    dashboard: [],
-    'data explorer': [],
-    documentation: [],
-    'edit data': [],
-    export: [],
-    extensibility: [],
-    extensionManager: [],
-    globalization: [],
-    grid: [],
-    import: [],
-    insights: [],
-    intellisense: [],
-    localization: [],
-    'managed instance': [],
-    notebooks: [],
-    'object explorer': [],
-    performance: [],
-    profiler: [],
-    'query editor': [],
-    'query execution': [],
-    reliability: [],
-    restore: [],
-    scripting: [],
-    'server group': [],
-    settings: [],
-    setup: [],
-    shell: [],
-    showplan: [],
-    snippet: [],
-    sql2019Preview: [],
-    sqldw: [],
-    supportability: [],
-    ux: []
-  }
-}
.github/commands.yml — 12 lines (vendored, deleted)
@@ -1,12 +0,0 @@
-{
-  perform: false,
-  commands: [
-    {
-      type: 'label',
-      name: 'duplicate',
-      allowTriggerByBot: true,
-      action: 'close',
-      comment: "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
-    }
-  ]
-}
.github/copycat.yml — 5 lines (vendored, deleted)
@@ -1,5 +0,0 @@
-{
-  perform: true,
-  target_owner: 'anthonydresser',
-  target_repo: 'testissues'
-}
.github/locker.yml — 6 lines (vendored, deleted)
@@ -1,6 +0,0 @@
-{
-  daysAfterClose: 45,
-  daysSinceLastUpdate: 3,
-  ignoredLabels: [],
-  perform: true
-}
.github/needs_more_info.yml — 6 lines (vendored, deleted)
@@ -1,6 +0,0 @@
-{
-  daysUntilClose: 7,
-  needsMoreInfoLabel: 'more info',
-  perform: false,
-  closeComment: "This issue has been closed automatically because it needs more information and has not had recent activity. See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
-}
.github/new_release.yml — 6 lines (vendored, deleted)
@@ -1,6 +0,0 @@
-{
-  newReleaseLabel: 'new-release',
-  newReleaseColor: '006b75',
-  daysAfterRelease: 5,
-  perform: true
-}
.github/similarity.yml — 5 lines (vendored, deleted)
@@ -1,5 +0,0 @@
-{
-  perform: true,
-  whenCreatedByTeam: true,
-  comment: "Thanks for submitting this issue. Please also check if it is already covered by an existing one, like:\n${potentialDuplicates}"
-}
.gitignore — 3 lines changed (vendored)
@@ -3,7 +3,6 @@ npm-debug.log
 Thumbs.db
 node_modules/
 .build/
-extensions/**/dist/
 out/
 out-build/
 out-editor/
@@ -18,4 +17,4 @@ build/node_modules
 coverage/
 test_data/
 test-results/
-yarn-error.log
+yarn-error.log
.vscode/cglicenses.schema.json — 23 lines (vendored, deleted)
@@ -1,23 +0,0 @@
-{
-  "type": "array",
-  "items": {
-    "type": "object",
-    "required": [
-      "name",
-      "licenseDetail"
-    ],
-    "properties": {
-      "name": {
-        "type": "string",
-        "description": "The name of the dependency"
-      },
-      "licenseDetail": {
-        "type": "array",
-        "description": "The complete license text of the dependency",
-        "items": {
-          "type": "string"
-        }
-      }
-    }
-  }
-}
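Read back from the schema above, an accepted document is an array of name/licenseDetail pairs; a minimal illustrative entry (hypothetical values), sketched as a TypeScript literal:

    // Hypothetical cglicenses.json content that satisfies the deleted schema:
    // an array of objects, each requiring "name" and "licenseDetail".
    const cglicenses = [
        {
            name: 'some-dependency',         // the name of the dependency
            licenseDetail: [                 // full license text, one string per line
                'MIT License',
                'Copyright (c) Example Author'
            ]
        }
    ];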
.vscode/cgmanifest.schema.json — 142 lines (vendored, deleted)
@@ -1,142 +0,0 @@
-{
-  "type": "object",
-  "properties": {
-    "registrations": {
-      "type": "array",
-      "items": {
-        "type": "object",
-        "properties": {
-          "component": {
-            "oneOf": [
-              {
-                "type": "object",
-                "required": [
-                  "type",
-                  "git"
-                ],
-                "properties": {
-                  "type": {
-                    "type": "string",
-                    "enum": [
-                      "git"
-                    ]
-                  },
-                  "git": {
-                    "type": "object",
-                    "required": [
-                      "name",
-                      "repositoryUrl",
-                      "commitHash"
-                    ],
-                    "properties": {
-                      "name": {
-                        "type": "string"
-                      },
-                      "repositoryUrl": {
-                        "type": "string"
-                      },
-                      "commitHash": {
-                        "type": "string"
-                      }
-                    }
-                  }
-                }
-              },
-              {
-                "type": "object",
-                "required": [
-                  "type",
-                  "npm"
-                ],
-                "properties": {
-                  "type": {
-                    "type": "string",
-                    "enum": [
-                      "npm"
-                    ]
-                  },
-                  "npm": {
-                    "type": "object",
-                    "required": [
-                      "name",
-                      "version"
-                    ],
-                    "properties": {
-                      "name": {
-                        "type": "string"
-                      },
-                      "version": {
-                        "type": "string"
-                      }
-                    }
-                  }
-                }
-              },
-              {
-                "type": "object",
-                "required": [
-                  "type",
-                  "other"
-                ],
-                "properties": {
-                  "type": {
-                    "type": "string",
-                    "enum": [
-                      "other"
-                    ]
-                  },
-                  "other": {
-                    "type": "object",
-                    "required": [
-                      "name",
-                      "downloadUrl",
-                      "version"
-                    ],
-                    "properties": {
-                      "name": {
-                        "type": "string"
-                      },
-                      "downloadUrl": {
-                        "type": "string"
-                      },
-                      "version": {
-                        "type": "string"
-                      }
-                    }
-                  }
-                }
-              }
-            ]
-          },
-          "repositoryUrl": {
-            "type": "string",
-            "description": "The git url of the component"
-          },
-          "version": {
-            "type": "string",
-            "description": "The version of the component"
-          },
-          "license": {
-            "type": "string",
-            "description": "The name of the license"
-          },
-          "developmentDependency": {
-            "type": "boolean",
-            "description": "This component is inlined in the vscode repo and **is not shipped**."
-          },
-          "isOnlyProductionDependency": {
-            "type": "boolean",
-            "description": "This component is shipped and **is not inlined in the vscode repo**."
-          },
-          "licenseDetail": {
-            "type": "array",
-            "items": {
-              "type": "string"
-            },
-            "description": "The license text"
-          }
-        }
-      }
-    }
-  }
-}
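The schema's oneOf admits git, npm, or other components; an illustrative registration (hypothetical values) that would validate against it, sketched as a TypeScript literal:

    // Hypothetical cgmanifest.json content matching the deleted schema.
    const cgmanifest = {
        registrations: [
            {
                component: {
                    type: 'git',  // discriminator: one of "git", "npm", "other"
                    git: {
                        name: 'some-library',                               // required
                        repositoryUrl: 'https://example.com/some/repo.git', // required
                        commitHash: '0123456789abcdef0123456789abcdef01234567' // required
                    }
                },
                license: 'MIT',              // the name of the license
                developmentDependency: true  // inlined in the repo and not shipped
            }
        ]
    };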
.vscode/extensions.json — 2 lines changed (vendored)
@@ -2,7 +2,7 @@
 // See https://go.microsoft.com/fwlink/?LinkId=827846
 // for the documentation about the extensions.json format
 "recommendations": [
-  "ms-vscode.vscode-typescript-tslint-plugin",
+  "eg2.tslint",
  "dbaeumer.vscode-eslint",
  "msjsdiag.debugger-for-chrome"
 ]
.vscode/launch.json — 94 lines changed (vendored)
@@ -9,12 +9,14 @@
"stopOnEntry": true,
"args": [
"hygiene"
]
],
"cwd": "${workspaceFolder}"
},
{
"type": "node",
"request": "attach",
"name": "Attach to Extension Host",
"protocol": "inspector",
"port": 5870,
"restart": true,
"outFiles": [
@@ -22,15 +24,19 @@
]
},
{
"type": "chrome",
"type": "node",
"request": "attach",
"name": "Attach to Shared Process",
"port": 9222,
"urlFilter": "*"
"protocol": "inspector",
"port": 5871,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "node",
"request": "attach",
"protocol": "inspector",
"name": "Attach to Search Process",
"port": 5876,
"outFiles": [
@@ -41,6 +47,7 @@
"type": "node",
"request": "attach",
"name": "Attach to CLI Process",
"protocol": "inspector",
"port": 5874,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -50,6 +57,7 @@
"type": "node",
"request": "attach",
"name": "Attach to Main Process",
"protocol": "inspector",
"port": 5875,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -74,9 +82,9 @@
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
},
"urlFilter": "*workbench.html*",
"urlFilter": "*index.html*",
"runtimeArgs": [
"--inspect=5875", "--no-cached-data"
"--inspect=5875"
],
"skipFiles": [
"**/winjs*.js"
@@ -85,28 +93,32 @@
"timeout": 45000
},
{
"type": "chrome",
"type": "node",
"request": "launch",
"name": "Launch azuredatastudio with new notebook command",
"name": "Unit Tests",
"protocol": "inspector",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
},
"urlFilter": "*index.html*",
"runtimeArgs": [
"--inspect=5875",
"--command=notebook.command.new"
"stopOnEntry": false,
"outputCapture": "std",
"args": [
"--delay",
"--timeout",
"2000"
],
"skipFiles": [
"**/winjs*.js"
],
"webRoot": "${workspaceFolder}",
"timeout": 45000
"cwd": "${workspaceFolder}",
"env": {
"ELECTRON_RUN_AS_NODE": "true"
},
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"name": "Launch Built-in Extension",
@@ -116,47 +128,9 @@
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
]
},
{
"type": "node",
"request": "launch",
"name": "Launch Smoke Test",
"program": "${workspaceFolder}/test/smoke/test/index.js",
"cwd": "${workspaceFolder}/test/smoke",
"env": {
"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
}
},
{
"type": "node",
"request": "launch",
"name": "Run Unit Tests",
"program": "${workspaceFolder}/test/electron/index.js",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
},
"outputCapture": "std",
"args": [
"--remote-debugging-port=9222"
],
"cwd": "${workspaceFolder}",
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
}
],
"compounds": [
{
"name": "Debug Unit Tests",
"configurations": [
"Attach to azuredatastudio",
"Run Unit Tests"
]
},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [
.vscode/settings.json — 16 lines changed (vendored)
@@ -11,7 +11,7 @@
 }
 },
 "files.associations": {
-  "cglicenses.json": "jsonc"
+  "OSSREADME.json": "jsonc"
 },
 "search.exclude": {
 "**/node_modules": true,
@@ -22,9 +22,9 @@
 "out-vscode/**": true,
 "i18n/**": true,
 "extensions/**/out/**": true,
-"test/smoke/out/**": true,
-"src/vs/base/test/node/uri.test.data.txt": true
+"test/smoke/out/**": true
 },
 "tslint.enable": true,
 "lcov.path": [
 "./.build/coverage/lcov.info",
 "./.build/coverage-single/lcov.info"
@@ -43,12 +43,6 @@
 "git.ignoreLimitWarning": true,
 "emmet.excludeLanguages": [],
 "typescript.preferences.importModuleSpecifier": "non-relative",
-"typescript.preferences.quoteStyle": "single",
-"json.schemas": [{
-  "fileMatch": [ "cgmanifest.json" ],
-  "url": "./.vscode/cgmanifest.schema.json"
-}, {
-  "fileMatch": [ "cglicenses.json" ],
-  "url": "./.vscode/cglicenses.schema.json"
-}]
+"typescript.preferences.quoteStyle": "single"

 }
.vscode/shared.code-snippets — 38 lines (vendored, deleted)
@@ -1,38 +0,0 @@
-{
-  // Each snippet is defined under a snippet name and has a scope, prefix, body and
-  // description. The scope defines in watch languages the snippet is applicable. The prefix is what is
-  // used to trigger the snippet and the body will be expanded and inserted.Possible variables are:
-  // $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
-  // Placeholders with the same ids are connected.
-  // Example:
-  "MSFT Copyright Header": {
-    "scope": "javascript,typescript,css",
-    "prefix": [
-      "header",
-      "stub",
-      "copyright"
-    ],
-    "body": [
-      "/*---------------------------------------------------------------------------------------------",
-      " * Copyright (c) Microsoft Corporation. All rights reserved.",
-      " * Licensed under the Source EULA. See License.txt in the project root for license information.",
-      " *--------------------------------------------------------------------------------------------*/",
-      "",
-      "$0"
-    ],
-    "description": "Insert Copyright Statement"
-  },
-  "TS -> Inject Service": {
-    "description": "Constructor Injection Pattern",
-    "prefix": "@inject",
-    "body": "@$1 private readonly _$2: ${1},$0"
-  },
-  "TS -> Event & Emitter": {
-    "prefix": "emitter",
-    "description": "Add emitter and event properties",
-    "body": [
-      "private readonly _onDid$1 = new Emitter<$2>();",
-      "readonly onDid$1: Event<$2> = this._onDid$1.event;"
-    ],
-  }
-}
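For reference, the "TS -> Event & Emitter" snippet above expands into a private emitter with a public event; with the placeholders $1 = ChangeName and $2 = string it would produce something like this sketch (the Emitter/Event import path is assumed from the vscode codebase this repo is based on):

    import { Emitter, Event } from 'vs/base/common/event'; // assumed path

    class NameModel {
        // private emitter, public event — the pattern the snippet generates
        private readonly _onDidChangeName = new Emitter<string>();
        readonly onDidChangeName: Event<string> = this._onDidChangeName.event;

        rename(next: string): void {
            this._onDidChangeName.fire(next); // listeners receive the new name
        }
    }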
.vscode/tasks.json — 17 lines changed (vendored)
@@ -28,23 +28,6 @@
 }
 }
 },
-{
-  "type": "npm",
-  "script": "strict-null-check-watch",
-  "label": "TS - Strict Null Checks",
-  "isBackground": true,
-  "presentation": {
-    "reveal": "never"
-  },
-  "problemMatcher": {
-    "base": "$tsc-watch",
-    "owner": "typescript-strict-null",
-    "applyTo": "allDocuments"
-  },
-  "runOptions": {
-    "runOn": "folderOpen"
-  }
-},
 {
 "type": "gulp",
 "task": "tslint",
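The removed task watches an npm script (presumably a tsc run with strictNullChecks enabled); the class of bug such a check reports, in a small illustrative TypeScript sketch:

    function firstLine(text: string | null): string {
        // With strictNullChecks on, using 'text' directly is a compile error:
        // "Object is possibly 'null'".
        // return text.split('\n')[0];

        // Narrowing the type first satisfies the checker:
        return text === null ? '' : text.split('\n')[0];
    }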
.yarnrc — 2 lines changed
@@ -1,3 +1,3 @@
 disturl "https://atom.io/download/electron"
-target "2.0.12"
+target "2.0.9"
 runtime "electron"
CHANGELOG.md — 47 lines changed
@@ -1,52 +1,5 @@
 # Change Log

-## Version 1.4.5
-* Release date: February 13, 2019
-* Release status: General Availability
-
-## What's new in this version
-* Added **Admin pack for SQL Server** extension pack to make it easier to install SQL Server admin-related extensions. This includes:
-  * [SQL Server Agent](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-agent-extension?view=sql-server-2017)
-  * [SQL Server Profiler](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-profiler-extension?view=sql-server-2017)
-  * [SQL Server Import](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-import-extension?view=sql-server-2017)
-* Added filtering extended event support in Profiler extension
-* Added Save as XML feature that can save T-SQL results as XML
-* Added Data-Tier Application Wizard improvements
-  * Added Generate script button
-  * Added view to give warnings of possible data loss during deployment
-* Updates to the [SQL Server 2019 Preview extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
-* Results streaming enabled by default for long running queries
-* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/23?closed=1).
-
-## Contributions and "thank you"
-We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
-
-* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
-* sadedil for `Missing feature request: Save as XML #3729`
-* gbritton1 for `Removed reference to object explorer #3463`
-
-## Version 1.3.8
-* Release date: January 9, 2019
-* Release status: General Availability
-
-## What's new in this version
-* #13 Feature Request: Azure Active Directory Authentication
-* #1040 Stream initial query results as they become available
-* #3298 Сan't add an azure account.
-* #2387 Support Per-User Installer
-* SQL Server Import updates for DACPAC\BACPAC
-* SQL Server Profiler UI and UX improvements
-* Updates to [SQL Server 2019 extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
-* **sp_executesql to SQL** and **New Database** extensions
-
-## Contributions and "thank you"
-We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
-
-* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
-* oltruong for `typo fix #3025'`
-* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
-* Thomas-S-B for `Simplified code #750`
-
 ## Version 1.2.4
 * Release date: November 6, 2018
 * Release status: General Availability

@@ -18,9 +18,7 @@ File a single issue per problem and feature request.
* Do not enumerate multiple bugs or feature requests in the same issue.
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.

The more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix.

The built-in tool for reporting an issue, which you can access by using `Report Issue` in Azure Data Studio's Help menu, can help streamline this process by automatically providing the version of Azure Data Studio, all your installed extensions, and your system info.
The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.

Please include the following with each issue.
OSSREADME.json — 1751 lines (new file)
File diff suppressed because it is too large.
README.md — 27 lines changed
@@ -9,22 +9,16 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
 
 Platform | Link
 -- | --
-Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2072725
-Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2072728
-Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2072354
-macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2072737
-Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2072360
-Linux RPM | https://go.microsoft.com/fwlink/?linkid=2072741
-Linux DEB | https://go.microsoft.com/fwlink/?linkid=2072744
+Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=2038320
+Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2038323
+macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2038327
+Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2038332
+Linux RPM | https://go.microsoft.com/fwlink/?linkid=2038401
+Linux DEB | https://go.microsoft.com/fwlink/?linkid=2038405
 
 Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
 
-Try out the latest insiders build from `master`:
-- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
-- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
-- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
-- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
-- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
+Try out the latest insiders build from `master` at https://github.com/Microsoft/azuredatastudio/releases.
 
 See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
@@ -68,13 +62,6 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
 ## Contributions and "Thank You"
 We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
 
-* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
-* sadedil for `Missing feature request: Save as XML #3729`
-* gbritton1 for `Removed reference to object explorer #3463`
-* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
-* oltruong for `typo fix #3025'`
-* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
-* Thomas-S-B for `Simplified code #750`
 * rdaniels6813 for `Add query plan theme support #3031`
 * Ruturaj123 for `Fixed some typos and grammatical errors #3027`
 * PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
@@ -17,12 +17,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
 chokidar: https://github.com/paulmillr/chokidar
 comment-json: https://github.com/kaelzhang/node-comment-json
 core-js: https://github.com/zloirock/core-js
-decompress: https://github.com/kevva/decompress
 emmet: https://github.com/emmetio/emmet
 error-ex: https://github.com/Qix-/node-error-ex
 escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
 fast-plist: https://github.com/Microsoft/node-fast-plist
-figures: https://github.com/sindresorhus/figures
 find-remove: https://www.npmjs.com/package/find-remove
 fs-extra: https://github.com/jprichardson/node-fs-extra
 gc-signals: https://github.com/Microsoft/node-gc-signals
@@ -43,28 +41,22 @@ expressly granted herein, whether by implication, estoppel or otherwise.
 native-keymap: https://github.com/Microsoft/node-native-keymap
 native-watchdog: https://github.com/Microsoft/node-native-watchdog
 ng2-charts: https://github.com/valor-software/ng2-charts
-node-fetch: https://github.com/bitinn/node-fetch
 node-pty: https://github.com/Tyriar/node-pty
 nsfw: https://github.com/Axosoft/nsfw
 pretty-data: https://github.com/vkiryukhin/pretty-data
 primeng: https://github.com/primefaces/primeng
-process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
 pty.js: https://github.com/chjj/pty.js
 reflect-metadata: https://github.com/rbuckton/reflect-metadata
-request: https://github.com/request/request
 rxjs: https://github.com/ReactiveX/RxJS
 semver: https://github.com/npm/node-semver
 slickgrid: https://github.com/6pac/SlickGrid
 sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
 svg.js: https://github.com/svgdotjs/svg.js
 systemjs: https://github.com/systemjs/systemjs
-temp-write: https://github.com/sindresorhus/temp-write
 underscore: https://github.com/jashkenas/underscore
 v8-profiler: https://github.com/node-inspector/v8-profiler
 vscode: https://github.com/microsoft/vscode
 vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
-vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
-vscode-nls: https://github.com/Microsoft/vscode-nls
 vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
 vscode-textmate: https://github.com/Microsoft/vscode-textmate
 winreg: https://github.com/fresc81/node-winreg
@@ -72,9 +64,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
 yauzl: https://github.com/thejoshwolfe/yauzl
 zone.js: https://www.npmjs.com/package/zone
 
+Microsoft PROSE SDK: https://microsoft.github.io/prose
 
 %% angular NOTICES AND INFORMATION BEGIN HERE
 =========================================
 The MIT License
 
 Copyright (c) 2014-2017 Google, Inc. http://angular.io
@@ -300,20 +293,6 @@ THE SOFTWARE.
 =========================================
 END OF core-js NOTICES AND INFORMATION
 
-%% decompress NOTICES AND INFORMATION BEGIN HERE
-=========================================
-MIT License
-
-Copyright (c) Kevin Mårtensson <kevinmartensson@gmail.com> (github.com/kevva)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-=========================================
-END OF decompress NOTICES AND INFORMATION
-
 %% emmet NOTICES AND INFORMATION BEGIN HERE
 =========================================
 The MIT License (MIT)
@@ -415,20 +394,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
 =========================================
 END OF fast-plist NOTICES AND INFORMATION
 
-%% figures NOTICES AND INFORMATION BEGIN HERE
-=========================================
-MIT License
-
-Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-=========================================
-END OF figures NOTICES AND INFORMATION
-
 %% fs-extra NOTICES AND INFORMATION BEGIN HERE
 =========================================
 (The MIT License)
@@ -1370,32 +1335,6 @@ SOFTWARE.
 =========================================
 END OF ng2-charts NOTICES AND INFORMATION
 
-%% node-fetch NOTICES AND INFORMATION BEGIN HERE
-=========================================
-The MIT License (MIT)
-
-Copyright (c) 2016 David Frank
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-=========================================
-END OF node-fetch NOTICES AND INFORMATION
-
 %% node-pty NOTICES AND INFORMATION BEGIN HERE
 =========================================
 Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1470,30 +1409,6 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
 =========================================
 END OF primeng NOTICES AND INFORMATION
 
-%% process-nextick-args NOTICES AND INFORMATION BEGIN HERE
-=========================================
-# Copyright (c) 2015 Calvin Metcalf
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.**
-=========================================
-END OF process-nextick-args NOTICES AND INFORMATION
-
 %% pty.js NOTICES AND INFORMATION BEGIN HERE
 =========================================
 Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1578,66 +1493,6 @@ END OF TERMS AND CONDITIONS
 =========================================
 END OF reflect-metadata NOTICES AND INFORMATION
 
-%% request NOTICES AND INFORMATION BEGIN HERE
-=========================================
-Apache License
-
-Version 2.0, January 2004
-
-http://www.apache.org/licenses/
-
-TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-1. Definitions.
-
-"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
-
-"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
-
-"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
-
-"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
-
-"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
-
-"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
-
-"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
-
-"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
-
-"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
-
-"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
-
-2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
-
-3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
-
-4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
-
-You must give any other recipients of the Work or Derivative Works a copy of this License; and
-
-You must cause any modified files to carry prominent notices stating that You changed the files; and
-
-You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
-
-If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
-
-5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
-
-6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
-
-7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
-
-8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
-
-9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
-
-END OF TERMS AND CONDITIONS
-=========================================
-END OF request NOTICES AND INFORMATION
-
 %% rxjs NOTICES AND INFORMATION BEGIN HERE
 =========================================
 Apache License
@@ -1963,20 +1818,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
 =========================================
 END OF systemjs NOTICES AND INFORMATION
 
-%% temp-write NOTICES AND INFORMATION BEGIN HERE
-=========================================
-MIT License
-
-Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-=========================================
-END OF temp-write NOTICES AND INFORMATION
-
 %% underscore NOTICES AND INFORMATION BEGIN HERE
 =========================================
 Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
@@ -2079,50 +1920,6 @@ OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWA
 =========================================
 END OF vscode-debugprotocol NOTICES AND INFORMATION
 
-%% vscode-languageclient NOTICES AND INFORMATION BEGIN HERE
-=========================================
-Copyright (c) Microsoft Corporation
-
-All rights reserved.
-
-MIT License
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
-modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
-OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-=========================================
-END OF vscode-languageclient NOTICES AND INFORMATION
-
-%% vscode-nls NOTICES AND INFORMATION BEGIN HERE
-=========================================
-The MIT License (MIT)
-
-Copyright (c) Microsoft Corporation
-
-All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
-modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
-is furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
-OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
-OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-=========================================
-END OF vscode-nls NOTICES AND INFORMATION
-
 %% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE
 =========================================
 vscode-ripgrep
@@ -2282,188 +2079,3 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 THE SOFTWARE.
 =========================================
 END OF zone.js NOTICES AND INFORMATION
-
-%% Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION BEGIN HERE
-=========================================
-NOTICES AND INFORMATION
-Do Not Translate or Localize
-
-This software incorporates material from third parties. Microsoft makes certain
-open source code available at http://3rdpartysource.microsoft.com, or you may
-send a check or money order for US $5.00, including the product name, the open
-source component name, and version number, to:
-
-Source Code Compliance Team
-Microsoft Corporation
-One Microsoft Way
-Redmond, WA 98052
-USA
-
-Notwithstanding any other terms, you may reverse engineer this software to the
-extent required to debug changes to any libraries licensed under the GNU Lesser
-General Public License.
-
--------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
-
-===================================CoreFx (BEGIN)
-The MIT License (MIT)
-
-Copyright (c) .NET Foundation and Contributors
-
-All rights reserved.
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-===================================CoreFx (END)
-
-===================================CoreFxLab (BEGIN)
-The MIT License (MIT)
-
-Copyright (c) Microsoft Corporation
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-===================================CoreFxLab (END)
-
-===================================Reactive Extensions (BEGIN)
-Copyright (c) .NET Foundation and Contributors
-All Rights Reserved
-
-Licensed under the Apache License, Version 2.0 (the "License"); you
-may not use this file except in compliance with the License. You may
-obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-implied. See the License for the specific language governing permissions
-and limitations under the License.
-
-List of contributors to the Rx libraries
-
-Rx and Ix.NET:
-Wes Dyer
-Jeffrey van Gogh
-Matthew Podwysocki
-Bart De Smet
-Danny van Velzen
-Erik Meijer
-Brian Beckman
-Aaron Lahman
-Georgi Chkodrov
-Arthur Watson
-Gert Drapers
-Mark Shields
-Eric Rozell
-
-Rx.js and Ix.js:
-Matthew Podwysocki
-Jeffrey van Gogh
-Bart De Smet
-Brian Beckman
-Wes Dyer
-Erik Meijer
-
-Tx:
-Georgi Chkodrov
-Bart De Smet
-Aaron Lahman
-Erik Meijer
-Brian Grunkemeyer
-Beysim Sezgin
-Tiho Tarnavski
-Collin Meek
-Sajay Anthony
-Karen Albrecht
-John Allen
-Zach Kramer
-
-Rx++ and Ix++:
-Aaron Lahman
-===================================Reactive Extensions (END)
-
--------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
-=========================================
-END OF Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION
-
-%% Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION BEGIN HERE
-=========================================
-NOTICES AND INFORMATION
-Do Not Translate or Localize
-
-This software incorporates material from third parties. Microsoft makes certain
-open source code available at http://3rdpartysource.microsoft.com, or you may
-send a check or money order for US $5.00, including the product name, the open
-source component name, and version number, to:
-
-Source Code Compliance Team
-Microsoft Corporation
-One Microsoft Way
-Redmond, WA 98052
-USA
-
-Notwithstanding any other terms, you may reverse engineer this software to the
-extent required to debug changes to any libraries licensed under the GNU Lesser
-General Public License.
-
--------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
-
-===================================ExcelDataReader (BEGIN)
-The MIT License (MIT)
-
-Copyright (c) 2014 ExcelDataReader
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in all
-copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-===================================ExcelDataReader (END)
-
--------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
-=========================================
-END OF Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION
@@ -12,13 +12,9 @@ steps:
  displayName: 'preinstall'

- script: |
    export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
    sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
    sudo chmod +x /etc/init.d/xvfb
    sudo update-rc.d xvfb defaults
    sudo service xvfb start
    # sh -e /etc/init.d/xvfb start
    # sleep 3
    export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0
    sh -e /etc/init.d/xvfb start
    sleep 3
  displayName: 'Linux preinstall'
  condition: eq(variables['Agent.OS'], 'Linux')

@@ -27,20 +23,16 @@ steps:
  displayName: 'Install'

- script: |
    node_modules/.bin/gulp electron
    node_modules/.bin/gulp compile --max_old_space_size=4096
    node_modules/.bin/gulp optimize-vscode --max_old_space_size=4096
    node_modules/.bin/gulp electron --silent
    node_modules/.bin/gulp compile --silent --max_old_space_size=4096
    node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
  displayName: 'Scripts'

- script: |
    DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
    ./scripts/test.sh --reporter mocha-junit-reporter
  displayName: 'Tests'

- task: PublishTestResults@2
  inputs:
    testResultsFiles: '**/test-results.xml'
  condition: succeededOrFailed()

- script: |
    yarn run tslint
  displayName: 'Run TSLint'
    testResultsFiles: '**/test-results.xml'
  condition: succeededOrFailed()
@@ -23,8 +23,4 @@ steps:
- task: PublishTestResults@2
  inputs:
    testResultsFiles: 'test-results.xml'
  condition: succeededOrFailed()

- script: |
    yarn run tslint
  displayName: 'Run TSLint'
  condition: succeededOrFailed()
@@ -1,40 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"
- script: |
    yarn
  displayName: Install Dependencies
- script: |
    yarn gulp electron-x64
  displayName: Download Electron
- script: |
    yarn gulp hygiene
  displayName: Run Hygiene Checks
- script: |
    yarn monaco-compile-check
  displayName: Run Monaco Editor Checks
- script: |
    yarn strict-null-check
  displayName: Run Strict Null Checks
- script: |
    yarn compile
  displayName: Compile Sources
- script: |
    yarn download-builtin-extensions
  displayName: Download Built-in Extensions
- script: |
    ./scripts/test.sh --tfs "Unit Tests"
  displayName: Run Unit Tests
- script: |
    ./scripts/test-integration.sh --tfs "Integration Tests"
  displayName: Run Integration Tests
- task: PublishTestResults@2
  displayName: Publish Tests Results
  inputs:
    testResultsFiles: '*-results.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
  condition: succeededOrFailed()
@@ -1,91 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.12.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"

- script: |
    set -e
    echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
    yarn
    yarn gulp -- hygiene
    yarn monaco-compile-check
    yarn strict-null-check
    VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" yarn gulp -- mixin
    node build/azure-pipelines/common/installDistro.js
    node build/lib/builtInExtensions.js
  displayName: Prepare build

- script: |
    set -e
    VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
    AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
    yarn gulp -- vscode-darwin-min upload-vscode-sourcemaps
  displayName: Build

- script: |
    set -e
    ./scripts/test.sh --build --tfs "Unit Tests"
    # APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
    # yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
  displayName: Run unit tests

- script: |
    set -e
    pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
  displayName: Archive build

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  inputs:
    ConnectedServiceName: 'ESRP CodeSign'
    FolderPath: '$(agent.builddirectory)'
    Pattern: 'VSCode-darwin.zip'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-401337-Apple",
          "operationSetCode": "MacAppDeveloperSign",
          "parameters": [ ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 120
  displayName: Codesign

- script: |
    set -e

    # remove pkg from archive
    zip -d ../VSCode-darwin.zip "*.pkg"

    # publish the build
    PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
    VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
    AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
    AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
    MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
    node build/azure-pipelines/common/publish.js \
      "$(VSCODE_QUALITY)" \
      darwin \
      archive \
      "VSCode-darwin-$(VSCODE_QUALITY).zip" \
      $VERSION \
      true \
      ../VSCode-darwin.zip

    # publish hockeyapp symbols
    node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"

    # upload configuration
    AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
    yarn gulp -- upload-vscode-configuration
  displayName: Publish

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  continueOnError: true
1
build/azure-pipelines/linux/.gitignore
vendored
@@ -1 +0,0 @@
pat
@@ -1,45 +0,0 @@
steps:
- script: |
    set -e
    sudo apt-get update
    sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 libgconf-2-4 dbus xvfb libgtk-3-0
    sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
    sudo chmod +x /etc/init.d/xvfb
    sudo update-rc.d xvfb defaults
    sudo service xvfb start
- task: NodeTool@0
  inputs:
    versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"
- script: |
    yarn
  displayName: Install Dependencies
- script: |
    yarn gulp electron-x64
  displayName: Download Electron
- script: |
    yarn gulp hygiene
  displayName: Run Hygiene Checks
- script: |
    yarn monaco-compile-check
  displayName: Run Monaco Editor Checks
- script: |
    yarn strict-null-check
  displayName: Run Strict Null Checks
- script: |
    yarn compile
  displayName: Compile Sources
- script: |
    yarn download-builtin-extensions
  displayName: Download Built-in Extensions
- script: |
    DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
  displayName: Run Unit Tests
- task: PublishTestResults@2
  displayName: Publish Tests Results
  inputs:
    testResultsFiles: '*-results.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
  condition: succeededOrFailed()
@@ -1,40 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
	return {
		id: quality,
		frozen: false
	};
}
function getConfig(quality) {
	const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/config';
	const query = {
		query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
		parameters: [
			{ name: '@quality', value: quality }
		]
	};
	return new Promise((c, e) => {
		client.queryDocuments(collection, query).toArray((err, results) => {
			if (err && err.code !== 409) {
				return e(err);
			}
			c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
		});
	});
}
getConfig(process.argv[2])
	.then(config => {
		console.log(config.frozen);
		process.exit(0);
	})
	.catch(err => {
		console.error(err);
		process.exit(1);
	});
@@ -1,112 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.12.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"

- script: |
    set -e
    export npm_config_arch="$(VSCODE_ARCH)"
    if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
      export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
    fi

    echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
    CHILD_CONCURRENCY=1 yarn
    npm run gulp -- hygiene
    npm run monaco-compile-check
    npm run strict-null-check
    VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
    node build/azure-pipelines/common/installDistro.js
    node build/lib/builtInExtensions.js

- script: |
    set -e
    VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
  name: build

- script: |
    set -e
    npm run gulp -- "electron-$(VSCODE_ARCH)"

    # xvfb seems to be crashing often, let's make sure it's always up
    service xvfb start

    DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
    # yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
  name: test

- script: |
    set -e
    REPO="$(pwd)"
    ROOT="$REPO/.."
    ARCH="$(VSCODE_ARCH)"

    # Publish tarball
    PLATFORM_LINUX="linux-$(VSCODE_ARCH)"
    [[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
    [[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
    BUILDNAME="VSCode-$PLATFORM_LINUX"
    BUILD="$ROOT/$BUILDNAME"
    BUILD_VERSION="$(date +%s)"
    [ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
    TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
    PACKAGEJSON="$BUILD/resources/app/package.json"
    VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

    rm -rf $ROOT/code-*.tar.*
    (cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

    AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
    AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
    MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
    node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"

    # Publish hockeyapp symbols
    node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"

    # Publish DEB
    npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
    PLATFORM_DEB="linux-deb-$ARCH"
    [[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
    DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
    DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

    AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
    AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
    MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
    node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"

    # Publish RPM
    npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
    PLATFORM_RPM="linux-rpm-$ARCH"
    [[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
    RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
    RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

    AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
    AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
    MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
    node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"

    # Publish Snap
    npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-prepare-snap"

    # Pack snap tarball artifact, in order to preserve file perms
    mkdir -p $REPO/.build/linux/snap-tarball
    SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$(VSCODE_ARCH).tar.gz"
    rm -rf $SNAP_TARBALL_PATH
    (cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  continueOnError: true

- task: PublishPipelineArtifact@0
  displayName: 'Publish Pipeline Artifact'
  inputs:
    artifactName: snap-$(VSCODE_ARCH)
    targetPath: .build/linux/snap-tarball
@@ -1,42 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.12.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"

- task: DownloadPipelineArtifact@0
  displayName: 'Download Pipeline Artifact'
  inputs:
    artifactName: snap-$(VSCODE_ARCH)
    targetPath: .build/linux/snap-tarball

- script: |
    set -e

    REPO="$(pwd)"
    ARCH="$(VSCODE_ARCH)"
    SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"

    # Install build dependencies
    (cd build && yarn)

    # Unpack snap tarball artifact, in order to preserve file perms
    SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
    (cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)

    # Create snap package
    BUILD_VERSION="$(date +%s)"
    SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
    PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
    VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
    SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
    (cd $SNAP_ROOT/code-* && snapcraft snap --output "$SNAP_PATH")

    # Publish snap package
    AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
    AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
    MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
    node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
@@ -1,65 +0,0 @@
resources:
  containers:
  - container: vscode-x64
    image: joaomoreno/vscode-linux-build-agent:x64
  - container: vscode-ia32
    image: joaomoreno/vscode-linux-build-agent:ia32
  - container: snapcraft
    image: snapcore/snapcraft

jobs:
- job: Windows
  condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
  pool:
    vmImage: VS2017-Win2016
  variables:
    VSCODE_ARCH: x64
  steps:
  - template: win32/product-build-win32.yml

- job: Windows32
  condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
  pool:
    vmImage: VS2017-Win2016
  variables:
    VSCODE_ARCH: ia32
  steps:
  - template: win32/product-build-win32.yml

- job: Linux
  condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
  pool:
    vmImage: 'Ubuntu-16.04'
  variables:
    VSCODE_ARCH: x64
  container: vscode-x64
  steps:
  - template: linux/product-build-linux.yml

- job: LinuxSnap
  condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
  pool:
    vmImage: 'Ubuntu-16.04'
  variables:
    VSCODE_ARCH: x64
  container: snapcraft
  dependsOn: Linux
  steps:
  - template: linux/snap-build-linux.yml

- job: Linux32
  condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
  pool:
    vmImage: 'Ubuntu-16.04'
  variables:
    VSCODE_ARCH: ia32
  container: vscode-ia32
  steps:
  - template: linux/product-build-linux.yml

- job: macOS
  condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
  pool:
    vmImage: macOS 10.13
  steps:
  - template: darwin/product-build-darwin.yml
@@ -1,44 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"
- task: UsePythonVersion@0
  inputs:
    versionSpec: '2.x'
    addToPath: true
- powershell: |
    yarn
  displayName: Install Dependencies
- powershell: |
    yarn gulp electron
  displayName: Download Electron
- powershell: |
    yarn gulp hygiene
  displayName: Run Hygiene Checks
- powershell: |
    yarn monaco-compile-check
  displayName: Run Monaco Editor Checks
- script: |
    yarn strict-null-check
  displayName: Run Strict Null Checks
- powershell: |
    yarn compile
  displayName: Compile Sources
- powershell: |
    yarn download-builtin-extensions
  displayName: Download Built-in Extensions
- powershell: |
    .\scripts\test.bat --tfs "Unit Tests"
  displayName: Run Unit Tests
- powershell: |
    .\scripts\test-integration.bat --tfs "Integration Tests"
  displayName: Run Integration Tests
- task: PublishTestResults@2
  displayName: Publish Tests Results
  inputs:
    testResultsFiles: '*-results.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
  condition: succeededOrFailed()
@@ -1,152 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "8.12.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"

- task: UsePythonVersion@0
  inputs:
    versionSpec: '2.x'
    addToPath: true

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    "machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
    $env:npm_config_arch="$(VSCODE_ARCH)"
    $env:CHILD_CONCURRENCY="1"
    $env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
    exec { yarn }
    exec { npm run gulp -- hygiene }
    exec { npm run monaco-compile-check }
    exec { npm run strict-null-check }
    exec { npm run gulp -- mixin }
    exec { node build/azure-pipelines/common/installDistro.js }
    exec { node build/lib/builtInExtensions.js }

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
    exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min" }
    exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
  name: build

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { npm run gulp -- "electron-$(VSCODE_ARCH)" }
    exec { .\scripts\test.bat --build --tfs "Unit Tests" }
    # yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
  name: test

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  inputs:
    ConnectedServiceName: 'ESRP CodeSign'
    FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
    Pattern: '*.dll,*.exe,*.node'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "VS Code"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "https://code.visualstudio.com/"
            },
            {
              "parameterName": "Append",
              "parameterValue": "/as"
            },
            {
              "parameterName": "FileDigest",
              "parameterValue": "/fd \"SHA256\""
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolVerify",
          "parameters": [
            {
              "parameterName": "VerifyAll",
              "parameterValue": "/all"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 120

- task: NuGetCommand@2
  displayName: Install ESRPClient.exe
  inputs:
    restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
    feedsToUse: config
    nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
    externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
    restoreDirectory: packages

- task: ESRPImportCertTask@1
  displayName: Import ESRP Request Signing Certificate
  inputs:
    ESRP: 'ESRP CodeSign'

- powershell: |
    $ErrorActionPreference = "Stop"
    .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(ESRP_AUTH_CERTIFICATE) -AuthCertificateKey $(ESRP_AUTH_CERTIFICATE_KEY)
  displayName: Import ESRP Auth Certificate

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-system-setup" "vscode-win32-$(VSCODE_ARCH)-user-setup" --sign }

    $Repo = "$(pwd)"
    $Root = "$Repo\.."
    $SystemExe = "$Repo\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe"
    $UserExe = "$Repo\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe"
    $Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
    $Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"

    # get version
    $PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
    $Version = $PackageJson.version
    $Quality = "$env:VSCODE_QUALITY"
    $env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
    $env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
    $env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"

    $assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }

    exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip }
    exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $SystemExe }
    exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-user" setup "VSCodeUserSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $UserExe }

    # publish hockeyapp symbols
    $hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
    exec { node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId }

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  continueOnError: true
@@ -1,70 +0,0 @@
function Create-TmpJson($Obj) {
	$FileName = [System.IO.Path]::GetTempFileName()
	ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
	return $FileName
}

$Auth = Create-TmpJson @{
	Version = "1.0.0"
	AuthenticationType = "AAD_CERT"
	ClientId = $env:ESRPClientId
	AuthCert = @{
		SubjectName = $env:ESRPAuthCertificateSubjectName
		StoreLocation = "LocalMachine"
		StoreName = "My"
	}
	RequestSigningCert = @{
		SubjectName = $env:ESRPCertificateSubjectName
		StoreLocation = "LocalMachine"
		StoreName = "My"
	}
}

$Policy = Create-TmpJson @{
	Version = "1.0.0"
}

$Input = Create-TmpJson @{
	Version = "1.0.0"
	SignBatches = @(
		@{
			SourceLocationType = "UNC"
			SignRequestFiles = @(
				@{
					SourceLocation = $args[0]
				}
			)
			SigningInfo = @{
				Operations = @(
					@{
						KeyCode = "CP-230012"
						OperationCode = "SigntoolSign"
						Parameters = @{
							OpusName = "VS Code"
							OpusInfo = "https://code.visualstudio.com/"
							Append = "/as"
							FileDigest = "/fd `"SHA256`""
							PageHash = "/NPH"
							TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
						}
						ToolName = "sign"
						ToolVersion = "1.0"
					},
					@{
						KeyCode = "CP-230012"
						OperationCode = "SigntoolVerify"
						Parameters = @{
							VerifyAll = "/all"
						}
						ToolName = "sign"
						ToolVersion = "1.0"
					}
				)
			}
		}
	)
}

$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output
@@ -1,2 +1,12 @@
[
	{
		"name": "ms-vscode.node-debug",
		"version": "1.26.7",
		"repo": "https://github.com/Microsoft/vscode-node-debug"
	},
	{
		"name": "ms-vscode.node-debug2",
		"version": "1.26.8",
		"repo": "https://github.com/Microsoft/vscode-node-debug2"
	}
]

@@ -43,7 +43,7 @@ function asYarnDependency(prefix, tree) {
}

function getYarnProductionDependencies(cwd) {
	const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
	const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
	const match = /^{"type":"tree".*$/m.exec(raw);

	if (!match || match.length !== 1) {

@@ -28,7 +28,7 @@ var editorEntryPoints = [
		name: 'vs/editor/editor.main',
		include: [],
		exclude: ['vs/css', 'vs/nls'],
		prepend: ['out-editor-build/vs/css.js', 'out-editor-build/vs/nls.js'],
		prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
	},
	{
		name: 'vs/base/common/worker/simpleWorker',
@@ -79,21 +79,16 @@ gulp.task('extract-editor-src', ['clean-editor-src'], function () {
			apiusages,
			extrausages
		],
		typings: [
			'typings/lib.ie11_safe_es6.d.ts',
			'typings/thenable.d.ts',
			'typings/es6-promise.d.ts',
			'typings/require-monaco.d.ts',
			'vs/monaco.d.ts'
		],
		libs: [
			`lib.es5.d.ts`,
			`lib.dom.d.ts`,
			`lib.webworker.importscripts.d.ts`
			`lib.d.ts`,
			`lib.es2015.collection.d.ts`
		],
		redirects: {
			'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
		},
		compilerOptions: {
			module: 2, // ModuleKind.AMD
		},
		shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
		importIgnorePattern: /^vs\/css!/,
		destRoot: path.join(root, 'out-editor-src')
@@ -113,8 +108,6 @@ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'],
		loaderConfig: {
			paths: {
				'vs': 'out-editor-build/vs',
				'vs/css': 'out-editor-build/vs/css.build',
				'vs/nls': 'out-editor-build/vs/nls.build',
				'vscode': 'empty:'
			}
		},
@@ -129,42 +122,25 @@ gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));

gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro', 'extract-editor-src'], function () {
	standalone.createESMSourcesAndResources2({
		srcFolder: './out-editor-src',
		outFolder: './out-editor-esm',
		outResourcesFolder: './out-monaco-editor-core/esm',
		ignores: [
			'inlineEntryPoint:0.ts',
			'inlineEntryPoint:1.ts',
			'vs/loader.js',
			'vs/nls.ts',
			'vs/nls.build.js',
			'vs/nls.d.ts',
			'vs/css.js',
			'vs/css.build.js',
			'vs/css.d.ts',
			'vs/base/worker/workerMain.ts',
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
	standalone.createESMSourcesAndResources({
		entryPoints: [
			'vs/editor/editor.main',
			'vs/editor/editor.worker'
		],
		renames: {
			'vs/nls.mock.ts': 'vs/nls.ts'
		outFolder: './out-editor-esm/src',
		outResourcesFolder: './out-monaco-editor-core/esm',
		redirects: {
			'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
			'vs/nls': 'vs/nls.mock',
		}
	});
});
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
	if (process.platform === 'win32') {
		const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
			cwd: path.join(__dirname, '../out-editor-esm')
		});
		console.log(result.stdout.toString());
		console.log(result.stderr.toString());
	} else {
		const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
			cwd: path.join(__dirname, '../out-editor-esm')
		});
		console.log(result.stdout.toString());
		console.log(result.stderr.toString());
	}
	const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
		cwd: path.join(__dirname, '../out-editor-esm')
	});
	console.log(result.stdout.toString());
});

function toExternalDTS(contents) {
@@ -218,7 +194,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
			this.emit('data', new File({
				path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
				base: data.base,
				contents: Buffer.from(toExternalDTS(data.contents.toString()))
				contents: new Buffer(toExternalDTS(data.contents.toString()))
			}));
		}))
		.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),

@@ -21,7 +21,6 @@ const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
const _ = require('underscore');

const extensionsPath = path.join(path.dirname(__dirname), 'extensions');

@@ -36,8 +35,7 @@ const tasks = compilations.map(function (tsconfigFile) {
	const absolutePath = path.join(extensionsPath, tsconfigFile);
	const relativeDirname = path.dirname(tsconfigFile);

	const tsconfig = require(absolutePath);
	const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
	const tsOptions = require(absolutePath).compilerOptions;
	tsOptions.verbose = false;
	tsOptions.sourceMap = true;

@@ -169,4 +167,4 @@ gulp.task('watch-extensions', tasks.map(t => t.watch));

gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild));
gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild));
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));
@@ -43,11 +43,8 @@ const indentationFilter = [
	// except specific files
	'!ThirdPartyNotices.txt',
	'!LICENSE.txt',
	'!**/LICENSE',
	'!src/vs/nls.js',
	'!src/vs/nls.build.js',
	'!src/vs/css.js',
	'!src/vs/css.build.js',
	'!src/vs/loader.js',
	'!src/vs/base/common/marked/marked.js',
	'!src/vs/base/common/winjs.base.js',
@@ -81,14 +78,12 @@ const indentationFilter = [
	'!src/vs/*/**/*.d.ts',
	'!src/typings/**/*.d.ts',
	'!extensions/**/*.d.ts',
	'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}',
	'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
	'!build/{lib,tslintRules}/**/*.js',
	'!build/**/*.sh',
	'!build/azure-pipelines/**/*.js',
	'!build/azure-pipelines/**/*.config',
	'!build/tfs/**/*.js',
	'!build/tfs/**/*.config',
	'!**/Dockerfile',
	'!**/*.Dockerfile',
	'!**/*.dockerfile',
	'!extensions/markdown-language-features/media/*.js'
];

@@ -101,8 +96,6 @@ const copyrightFilter = [
	'!**/*.md',
	'!**/*.bat',
	'!**/*.cmd',
	'!**/*.ico',
	'!**/*.icns',
	'!**/*.xml',
	'!**/*.sh',
	'!**/*.txt',

@@ -13,6 +13,7 @@ const es = require('event-stream');
const util = require('./lib/util');
const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip');
const assign = require('object-assign');

// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');

@@ -33,17 +33,16 @@ const i18n = require('./lib/i18n');
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const glob = require('glob');
// {{SQL CARBON EDIT}} - End
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
const minimist = require('minimist');

const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
// {{SQL CARBON EDIT}}
var del = require('del');

const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}}
const nodeModules = [
@@ -57,7 +56,28 @@ const nodeModules = [
	.concat(_.uniq(productionDependencies.map(d => d.name)))
	.concat(baseModules);


// Build
const builtInExtensions = require('./builtInExtensions.json');

const excludedExtensions = [
	'vscode-api-tests',
	'vscode-colorize-tests',
	'ms-vscode.node-debug',
	'ms-vscode.node-debug2',
];

// {{SQL CARBON EDIT}}
const vsce = require('vsce');
const sqlBuiltInExtensions = [
	// Add SQL built-in extensions here.
	// the extension will be excluded from SQLOps package and will have separate vsix packages
	'agent',
	'import',
	'profiler'
];
var azureExtensions = [ 'azurecore'];

const vscodeEntryPoints = _.flatten([
	buildfile.entrypoint('vs/workbench/workbench.main'),
	buildfile.base,
@@ -70,15 +90,14 @@ const vscodeResources = [
	'out-build/cli.js',
	'out-build/driver.js',
	'out-build/bootstrap.js',
	'out-build/bootstrap-fork.js',
	'out-build/bootstrap-amd.js',
	'out-build/bootstrap-window.js',
	'out-build/paths.js',
	'out-build/vs/**/*.{svg,png,cur,html}',
	'out-build/vs/base/common/performance.js',
	'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}',
	'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
	'out-build/vs/workbench/browser/media/*-theme.css',
	'out-build/vs/workbench/electron-browser/bootstrap/**',
	'out-build/vs/workbench/parts/debug/**/*.json',
	'out-build/vs/workbench/parts/execution/**/*.scpt',
	'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
@@ -87,7 +106,6 @@ const vscodeResources = [
	'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
	'out-build/vs/workbench/services/files/**/*.exe',
	'out-build/vs/workbench/services/files/**/*.md',
	'out-build/vs/code/electron-browser/workbench/**',
	'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
	'out-build/vs/code/electron-browser/issue/issueReporter.js',
	'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
@@ -135,31 +153,21 @@ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compil


gulp.task('optimize-index-js', ['optimize-vscode'], () => {
	const fullpath = path.join(process.cwd(), 'out-vscode/vs/code/electron-browser/workbench/workbench.js');
	const fullpath = path.join(process.cwd(), 'out-vscode/vs/workbench/electron-browser/bootstrap/index.js');
	const contents = fs.readFileSync(fullpath).toString();
	const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
	fs.writeFileSync(fullpath, newContents);
});

const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
const baseUrl = `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`;
gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`));
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));

// Package

// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));

function darwinBundleDocumentType(extensions, icon) {
	return {
		name: product.nameLong + ' document',
		role: 'Editor',
		ostypes: ["TEXT", "utxt", "TUTX", "****"],
		extensions: extensions,
		iconFile: icon
	};
}

const config = {
	version: getElectronVersion(),
	productAppName: product.nameLong,
@@ -170,16 +178,19 @@ const config = {
	darwinApplicationCategoryType: 'public.app-category.developer-tools',
	darwinHelpBookFolder: 'VS Code HelpBook',
	darwinHelpBookName: 'VS Code HelpBook',
	darwinBundleDocumentTypes: [
		// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
		darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
	],
	darwinBundleDocumentTypes: [{
		name: product.nameLong + ' document',
		role: 'Editor',
		ostypes: ["TEXT", "utxt", "TUTX", "****"],
		// {{SQL CARBON EDIT}}
		extensions: ["csv", "json", "sqlplan", "sql", "xml"],
		iconFile: 'resources/darwin/code_file.icns'
	}],
	darwinBundleURLTypes: [{
		role: 'Viewer',
		name: product.nameLong,
		urlSchemes: [product.urlProtocol]
	}],
	darwinForceDarkModeSupport: true,
	darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
	linuxExecutableName: product.applicationName,
	winIcon: 'resources/win32/code.ico',
@@ -199,7 +210,7 @@ function getElectron(arch) {
	});

	return gulp.src('package.json')
		.pipe(json({ name: product.nameShort }))
		.pipe(json({ name: product.nameShort }))
		.pipe(electron(electronOpts))
		.pipe(filter(['**', '!**/app/package.json']))
		.pipe(vfs.dest('.build/electron'));
@@ -210,8 +221,6 @@ gulp.task('clean-electron', util.rimraf('.build/electron'));
gulp.task('electron', ['clean-electron'], getElectron(process.arch));
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));
gulp.task('electron-arm', ['clean-electron'], getElectron('arm'));
gulp.task('electron-arm64', ['clean-electron'], getElectron('arm64'));


/**
@@ -248,6 +257,61 @@ function computeChecksum(filename) {
	return hash;
}

function packageBuiltInExtensions() {
	const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
		.map(manifestPath => {
			const extensionPath = path.dirname(path.join(root, manifestPath));
			const extensionName = path.basename(extensionPath);
			return { name: extensionName, path: extensionPath };
		})
		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
	sqlBuiltInLocalExtensionDescriptions.forEach(element => {
		const packagePath = path.join(path.dirname(root), element.name + '.vsix');
		console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
		vsce.createVSIX({
			cwd: element.path,
			packagePath: packagePath,
			useYarn: true
		});
	});
}

// {{SQL CARBON EDIT}}
function packageAzureCoreTask(platform, arch) {
	var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
	if (platform === 'darwin') {
		destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', 'azurecore');
	} else {
		destination = path.join(destination, 'resources', 'app', 'extensions', 'azurecore');
	}

	platform = platform || process.platform;

	return () => {
		const root = path.resolve(path.join(__dirname, '..'));
		const localExtensionDescriptions = glob.sync('extensions/*/package.json')
			.map(manifestPath => {
				const extensionPath = path.dirname(path.join(root, manifestPath));
				const extensionName = path.basename(extensionPath);
				return { name: extensionName, path: extensionPath };
			})
			.filter(({ name }) => azureExtensions.indexOf(name) > -1);

		const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
			return ext.fromLocal(extension.path);
		}));

		let result = localExtensions
			.pipe(util.skipDirectories())
			.pipe(util.fixWin32DirectoryPermissions())
			.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));

		return result.pipe(vfs.dest(destination));
	};
}

function packageTask(platform, arch, opts) {
	opts = opts || {};

@@ -261,23 +325,47 @@ function packageTask(platform, arch, opts) {
|
||||
const checksums = computeChecksums(out, [
|
||||
'vs/workbench/workbench.main.js',
|
||||
'vs/workbench/workbench.main.css',
|
||||
'vs/code/electron-browser/workbench/workbench.html',
|
||||
'vs/code/electron-browser/workbench/workbench.js'
|
||||
'vs/workbench/electron-browser/bootstrap/index.html',
|
||||
'vs/workbench/electron-browser/bootstrap/index.js',
|
||||
'vs/workbench/electron-browser/bootstrap/preload.js'
|
||||
]);
|
||||
|
||||
const src = gulp.src(out + '/**', { base: '.' })
|
||||
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
|
||||
.pipe(util.setExecutableBit(['**/*.sh']))
|
||||
.pipe(filter(['**', '!**/*.js.map']));
|
||||
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
|
||||
|
||||
const root = path.resolve(path.join(__dirname, '..'));
|
||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
// {{SQL CARBON EDIT}}
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
|
||||
|
||||
packageBuiltInExtensions();
|
||||
|
||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||
return ext.fromLocal(extension.path)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
}));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
ext.packageBuiltInExtensions();
|
||||
const extensionDepsSrc = [
|
||||
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
];
|
||||
|
||||
const sources = es.merge(src, ext.packageExtensionsStream({
|
||||
sourceMappingURLBase: sourceMappingURLBase
|
||||
}));
|
||||
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']))
|
||||
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
|
||||
|
||||
const sources = es.merge(src, localExtensions, localExtensionDependencies)
|
||||
.pipe(util.setExecutableBit(['**/*.sh']))
|
||||
.pipe(filter(['**', '!**/*.js.map']));
|
||||
|
||||
let version = packageJson.version;
|
||||
// @ts-ignore JSON checking: quality is optional
|
||||
@@ -289,15 +377,8 @@ function packageTask(platform, arch, opts) {
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
|
||||
const packageJsonUpdates = { name, version };
|
||||
|
||||
// for linux url handling
|
||||
if (platform === 'linux') {
|
||||
packageJsonUpdates.desktopName = `${product.applicationName}-url-handler.desktop`;
|
||||
}
|
||||
|
||||
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
|
||||
.pipe(json(packageJsonUpdates));
|
||||
.pipe(json({ name, version }));
|
||||
|
||||
const date = new Date().toISOString();
|
||||
const productJsonUpdate = { commit, date, checksums };
|
||||
@@ -315,7 +396,7 @@ function packageTask(platform, arch, opts) {
|
||||
|
||||
// TODO the API should be copied to `out` during compile, not here
|
||||
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
|
||||
// {{SQL CARBON EDIT}}
|
||||
// {{SQL CARBON EDIT}}
|
||||
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
|
||||
|
||||
const depsSrc = [
|
||||
@@ -327,17 +408,16 @@ function packageTask(platform, arch, opts) {
|
||||
const deps = gulp.src(depsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']))
|
||||
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('vscode-sqlite3', ['binding.gyp', 'benchmark/**', 'cloudformation/**', 'deps/**', 'test/**', 'build/**', 'src/**'], ['build/Release/*.node']))
|
||||
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/*.js']))
|
||||
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
|
||||
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['build/Release/*.node']))
|
||||
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['build/Release/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
|
||||
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/index.js']))
|
||||
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
|
||||
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
|
||||
// {{SQL CARBON EDIT}}
|
||||
@@ -349,7 +429,6 @@ function packageTask(platform, arch, opts) {
|
||||
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
|
||||
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
|
||||
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
|
||||
|
||||
@@ -360,7 +439,7 @@ function packageTask(platform, arch, opts) {
'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*',
'node_modules/chart.js/**/*.*'
], { base: '.', dot: true });

let all = es.merge(
@@ -377,37 +456,7 @@ function packageTask(platform, arch, opts) {
);

if (platform === 'win32') {
all = es.merge(all, gulp.src([
'resources/win32/bower.ico',
'resources/win32/c.ico',
'resources/win32/config.ico',
'resources/win32/cpp.ico',
'resources/win32/csharp.ico',
'resources/win32/css.ico',
'resources/win32/default.ico',
'resources/win32/go.ico',
'resources/win32/html.ico',
'resources/win32/jade.ico',
'resources/win32/java.ico',
'resources/win32/javascript.ico',
'resources/win32/json.ico',
'resources/win32/less.ico',
'resources/win32/markdown.ico',
'resources/win32/php.ico',
'resources/win32/powershell.ico',
'resources/win32/python.ico',
'resources/win32/react.ico',
'resources/win32/ruby.ico',
'resources/win32/sass.ico',
'resources/win32/shell.ico',
'resources/win32/sql.ico',
'resources/win32/typescript.ico',
'resources/win32/vue.ico',
'resources/win32/xml.ico',
'resources/win32/yaml.ico',
'resources/win32/code_70x70.png',
'resources/win32/code_150x150.png'
], { base: '.' }));
all = es.merge(all, gulp.src(['resources/win32/code_file.ico', 'resources/win32/code_70x70.png', 'resources/win32/code_150x150.png'], { base: '.' }));
} else if (platform === 'linux') {
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
} else if (platform === 'darwin') {
@@ -432,8 +481,6 @@ function packageTask(platform, arch, opts) {

result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));

result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
@@ -444,15 +491,6 @@ function packageTask(platform, arch, opts) {
.pipe(rename('bin/' + product.applicationName)));
}

// submit all stats that have been collected
// during the build phase
if (opts.stats) {
result.on('end', () => {
const { submitAllStats } = require('./lib/stats');
submitAllStats(product, commit).then(() => console.log('Submitted bundle stats!'));
});
}

return result.pipe(vfs.dest(destination));
};
}
@@ -460,13 +498,9 @@ function packageTask(platform, arch, opts) {
const buildRoot = path.dirname(root);

// {{SQL CARBON EDIT}}
gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'win32', 'x64'));
gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'darwin'));
gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'linux', 'x64'));

gulp.task('vscode-win32-x64-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'win32', 'x64'));
gulp.task('vscode-darwin-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'darwin'));
gulp.task('vscode-linux-x64-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'linux', 'x64'));
gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('win32', 'x64'));
gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], packageAzureCoreTask('darwin'));
gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('linux', 'x64'));

gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-x64')));
@@ -474,23 +508,20 @@ gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'azuredatastud
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm')));
gulp.task('clean-vscode-linux-arm64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm64')));

gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'vscode-win32-x64-mssql', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'vscode-darwin-mssql', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { stats: true }));
gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'vscode-linux-x64-mssql', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
gulp.task('vscode-linux-arm64', ['optimize-vscode', 'clean-vscode-linux-arm64'], packageTask('linux', 'arm64'));

gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true, stats: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
gulp.task('vscode-linux-arm64-min', ['minify-vscode', 'clean-vscode-linux-arm64'], packageTask('linux', 'arm64', { minified: true }));

// Transifex Localizations

@@ -526,7 +557,7 @@ gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
});

gulp.task('vscode-translations-export', ['optimize-vscode'], function () {
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
@@ -535,44 +566,46 @@ gulp.task('vscode-translations-export', ['optimize-vscode'], function () {
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(vfs.dest('../vscode-translations-export'));
// {{SQL CARBON EDIT}}
// disable since function makes calls to VS Code Transifex API
// ).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(vfs.dest('../vscode-transifex-input'));
});

gulp.task('vscode-translations-pull', function () {
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));

let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
return i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-translations-import/${language.id}/setup`));
}));
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
});
});

gulp.task('vscode-translations-import', function () {
// {{SQL CARBON EDIT}} - Replace function body with our own
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
.pipe(i18n.prepareI18nFiles())
.pipe(vfs.dest(`./i18n/${language.folderName}`));

// {{SQL CARBON EDIT}}
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
// .pipe(vfs.dest(`./build/win32/i18n`));
});
// {{SQL CARBON EDIT}} - End
});

// Sourcemaps

gulp.task('upload-vscode-sourcemaps', ['vscode-darwin-min', 'minify-vscode'], () => {
gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));

const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' });
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
const extensions = gulp.src('extensions/**/out/**/*.map', { base: '.' });

return es.merge(vs, extensionsOut, extensionsDist)
.pipe(es.through(function (data) {
// debug
console.log('Uploading Sourcemap', data.relative);
this.emit('data', data);
}))
return es.merge(vs, extensions)
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
@@ -617,7 +650,7 @@ function getSettingsSearchBuildId(packageJson) {
const branch = process.env.BUILD_SOURCEBRANCH;
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
/\/master$/.test(branch) ? 1 :
2; // Some unexpected branch
2; // Some unexpected branch

const out = cp.execSync(`git rev-list HEAD --count`);
const count = parseInt(out.toString());
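getSettingsSearchBuildId thus derives a numeric id from a branch class (release, master, or other) plus the commit count from git rev-list. A hedged sketch of that kind of packing (the exact formula this build uses is not shown in the hunk, so the arithmetic below is illustrative):

const cp = require('child_process');

function settingsSearchBuildId(branchId) {
    // branchId: 0 = release, 1 = master, 2 = anything else (see above)
    const count = parseInt(cp.execSync('git rev-list HEAD --count').toString(), 10);
    return branchId * 1e8 + count; // keep the branch class in the high digits (illustrative packing)
}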
@@ -690,5 +723,6 @@ function installService() {
}

gulp.task('install-sqltoolsservice', () => {
return installService();
return installService();
});


@@ -19,7 +19,7 @@ const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);

function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
}

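The arch maps here (and getRpmPackageArch below) translate Node-style arch names into the names Debian and RPM tooling expect; a lookup for an unmapped arch simply yields undefined. For example:

getDebPackageArch('x64');   // 'amd64'
getDebPackageArch('arm');   // 'armhf'
getDebPackageArch('mips');  // undefined - no deb package for this arch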
function prepareDebPackage(arch) {
@@ -30,17 +30,11 @@ function prepareDebPackage(arch) {

return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));

const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '-url-handler.desktop'));

const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.applicationName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));

const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -85,7 +79,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));

const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, code);
const all = es.merge(control, postinst, postrm, prerm, desktop, appdata, icon, code);

return all.pipe(vfs.dest(destination));
};
@@ -105,7 +99,7 @@ function getRpmBuildPath(rpmArch) {
}

function getRpmPackageArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf' }[arch];
}

function prepareRpmPackage(arch) {
@@ -115,17 +109,11 @@ function prepareRpmPackage(arch) {

return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));

const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '-url-handler.desktop'));

const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.applicationName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));

const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -156,7 +144,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));

const all = es.merge(code, desktops, appdata, icon, spec, specIcon);
const all = es.merge(code, desktop, appdata, icon, spec, specIcon);

return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
};
@@ -174,7 +162,6 @@ function buildRpmPackage(arch) {
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
]);
}

function getSnapBuildPath(arch) {
return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`;
}
@@ -195,21 +182,17 @@ function prepareSnapPackage(arch) {
.pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`));

const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));

const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@VERSION@@', `${packageJson.version}-${linuxPackageRevision}`))
.pipe(replace('@@VERSION@@', packageJson.version))
.pipe(rename('snap/snapcraft.yaml'));

const snapUpdate = gulp.src('resources/linux/snap/snapUpdate.sh', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(rename(`usr/share/${product.applicationName}/snapUpdate.sh`));

const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
.pipe(rename('electron-launch'));

const all = es.merge(desktop, icon, code, snapcraft, electronLaunch, snapUpdate);
const all = es.merge(desktop, icon, code, snapcraft, electronLaunch);

return all.pipe(vfs.dest(destination));
};
@@ -217,7 +200,11 @@ function prepareSnapPackage(arch) {

function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
]);
}

function getFlatpakArch(arch) {
@@ -297,39 +284,33 @@ function buildFlatpak(arch) {
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64'));
gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf'));
gulp.task('clean-vscode-linux-arm64-deb', util.rimraf('.build/linux/deb/arm64'));
gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
gulp.task('clean-vscode-linux-arm64-rpm', util.rimraf('.build/linux/rpm/arm64'));
gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-arm64-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i386'));
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));

gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-deb', ['clean-vscode-linux-arm64-deb'], prepareDebPackage('arm64'));
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
gulp.task('vscode-linux-arm64-build-deb', ['vscode-linux-arm64-prepare-deb'], buildDebPackage('arm64'));

gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-rpm', ['clean-vscode-linux-arm64-rpm'], prepareRpmPackage('arm64'));
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
gulp.task('vscode-linux-arm64-build-rpm', ['vscode-linux-arm64-prepare-rpm'], buildRpmPackage('arm64'));

gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64'));
gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-snap', ['clean-vscode-linux-arm64-snap'], prepareSnapPackage('arm64'));
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
gulp.task('vscode-linux-arm64-build-snap', ['vscode-linux-arm64-prepare-snap'], buildSnapPackage('arm64'));

@@ -15,7 +15,6 @@ const util = require('./lib/util');
const pkg = require('../package.json');
const product = require('../product.json');
const vfs = require('vinyl-fs');
const rcedit = require('rcedit');
const mkdirp = require('mkdirp');

const repoPath = path.dirname(__dirname);
@@ -26,21 +25,18 @@ const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
const signPS1 = path.join(repoPath, 'build', 'tfs', 'win32', 'sign.ps1');

function packageInnoSetup(iss, options, cb) {
options = options || {};

const definitions = options.definitions || {};
const debug = process.argv.some(arg => arg === '--debug-inno');

if (process.argv.some(arg => arg === '--debug-inno')) {
if (debug) {
definitions['Debug'] = 'true';
}

if (process.argv.some(arg => arg === '--sign')) {
definitions['Sign'] = 'true';
}

const keys = Object.keys(definitions);

keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
@@ -140,14 +136,4 @@ function copyInnoUpdater(arch) {
}

gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));

function patchInnoUpdater(arch) {
return cb => {
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
};
}

gulp.task('vscode-win32-ia32-inno-updater', ['vscode-win32-ia32-copy-inno-updater'], patchInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-inno-updater', ['vscode-win32-x64-copy-inno-updater'], patchInnoUpdater('x64'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
@@ -1,15 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es2017",
"jsx": "preserve",
"checkJs": true
},
"include": [
"**/*.js"
],
"exclude": [
"node_modules",
"**/node_modules/*"
]
}
@@ -4,33 +4,33 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const es = require("event-stream");
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
const VinylFile = require("vinyl");
const minimatch = require("minimatch");
var path = require("path");
var es = require("event-stream");
var pickle = require("chromium-pickle-js");
var Filesystem = require("asar/lib/filesystem");
var VinylFile = require("vinyl");
var minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) {
const shouldUnpackFile = (file) => {
for (let i = 0; i < unpackGlobs.length; i++) {
var shouldUnpackFile = function (file) {
for (var i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) {
return true;
}
}
return false;
};
const filesystem = new Filesystem(folderPath);
const out = [];
var filesystem = new Filesystem(folderPath);
var out = [];
// Keep track of pending inserts
let pendingInserts = 0;
let onFileInserted = () => { pendingInserts--; };
var pendingInserts = 0;
var onFileInserted = function () { pendingInserts--; };
// Do not insert twice the same directory
const seenDir = {};
const insertDirectoryRecursive = (dir) => {
var seenDir = {};
var insertDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
let lastSlash = dir.lastIndexOf('/');
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
@@ -40,8 +40,8 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
seenDir[dir] = true;
filesystem.insertDirectory(dir);
};
const insertDirectoryForFile = (file) => {
let lastSlash = file.lastIndexOf('/');
var insertDirectoryForFile = function (file) {
var lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\');
}
@@ -49,7 +49,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
insertDirectoryRecursive(file.substring(0, lastSlash));
}
};
const insertFile = (relativePath, stat, shouldUnpack) => {
var insertFile = function (relativePath, stat, shouldUnpack) {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
@@ -59,13 +59,13 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
return;
}
if (!file.stat.isFile()) {
throw new Error(`unknown item in stream!`);
throw new Error("unknown item in stream!");
}
const shouldUnpack = shouldUnpackFile(file);
var shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path);
var relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
@@ -79,33 +79,34 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents);
}
}, function () {
let finish = () => {
var _this = this;
var finish = function () {
{
const headerPickle = pickle.createEmpty();
var headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));
const headerBuf = headerPickle.toBuffer();
const sizePickle = pickle.createEmpty();
var headerBuf = headerPickle.toBuffer();
var sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length);
const sizeBuf = sizePickle.toBuffer();
var sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf);
out.unshift(sizeBuf);
}
const contents = Buffer.concat(out);
var contents = Buffer.concat(out);
out.length = 0;
this.queue(new VinylFile({
_this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: destFilename,
contents: contents
}));
this.queue(null);
_this.queue(null);
};
// Call finish() only when all file inserts have finished...
if (pendingInserts === 0) {
finish();
}
else {
onFileInserted = () => {
onFileInserted = function () {
pendingInserts--;
if (pendingInserts === 0) {
finish();

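The pendingInserts bookkeeping in createAsar is a small completion-counting pattern: every asynchronous insertFile bumps a counter, and the stream's end handler only emits the finished archive once the counter drains. A self-contained sketch of the same idea (illustrative, not the repo's code):

let pending = 0;
let finish = () => { /* emit the assembled archive here */ };
let onInserted = () => { pending--; };

function insert(startAsyncWork) {
    pending++;
    startAsyncWork(onInserted); // the work calls back exactly once when done
}

// At end-of-stream: run finish() now, or re-point the callback so the
// last insert to complete triggers it.
function onEnd() {
    if (pending === 0) {
        finish();
    } else {
        onInserted = () => { if (--pending === 0) finish(); };
    }
}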
@@ -7,8 +7,8 @@

import * as path from 'path';
import * as es from 'event-stream';
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
import * as pickle from 'chromium-pickle-js';
import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';


@@ -49,7 +49,7 @@ function syncMarketplaceExtension(extension) {

rimraf.sync(getExtensionPath(extension));

return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
return ext.fromMarketplace(extension.name, extension.version)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));

@@ -4,19 +4,19 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vm = require("vm");
var fs = require("fs");
var path = require("path");
var vm = require("vm");
/**
* Bundle `entryPoints` given config `config`.
*/
function bundle(entryPoints, config, callback) {
const entryPointsMap = {};
entryPoints.forEach((module) => {
var entryPointsMap = {};
entryPoints.forEach(function (module) {
entryPointsMap[module.name] = module;
});
const allMentionedModulesMap = {};
entryPoints.forEach((module) => {
var allMentionedModulesMap = {};
entryPoints.forEach(function (module) {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true;
@@ -25,30 +25,26 @@ function bundle(entryPoints, config, callback) {
allMentionedModulesMap[excludedModule] = true;
});
});
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} };
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);
const loader = loaderModule.exports;
var loader = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
if (!config.paths['vs/nls']) {
config.paths['vs/nls'] = 'out-build/vs/nls.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);
loader(['require'], (localRequire) => {
const resolvePath = (path) => {
const r = localRequire.toUrl(path);
loader(['require'], function (localRequire) {
var resolvePath = function (path) {
var r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (const moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId];
for (var moduleId in entryPointsMap) {
var entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
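The loader bootstrap in this hunk evaluates src/vs/loader.js inside a CommonJS-style wrapper via vm.runInThisContext instead of require()-ing it directly. The same pattern in isolation (a sketch; the file path is a placeholder):

const fs = require('fs');
const vm = require('vm');

function loadAsCommonJs(filePath) {
    const code = fs.readFileSync(filePath);
    // Wrap the source the way Node's module system would, then run it in this context.
    const fn = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
    const mod = { exports: {} };
    fn.call({}, require, mod, mod.exports);
    return mod.exports; // e.g. the AMD loader object
}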
@@ -57,59 +53,59 @@ function bundle(entryPoints, config, callback) {
}
}
});
loader(Object.keys(allMentionedModulesMap), () => {
const modules = loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources();
loader(Object.keys(allMentionedModulesMap), function () {
var modules = loader.getBuildInfo();
var partialResult = emitEntryPoints(modules, entryPointsMap);
var cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, (err) => callback(err, null));
}, function (err) { return callback(err, null); });
}
exports.bundle = bundle;
function emitEntryPoints(modules, entryPoints) {
const modulesMap = {};
modules.forEach((m) => {
var modulesMap = {};
modules.forEach(function (m) {
modulesMap[m.id] = m;
});
const modulesGraph = {};
modules.forEach((m) => {
var modulesGraph = {};
modules.forEach(function (m) {
modulesGraph[m.id] = m.dependencies;
});
const sortedModules = topologicalSort(modulesGraph);
let result = [];
const usedPlugins = {};
const bundleData = {
var sortedModules = topologicalSort(modulesGraph);
var result = [];
var usedPlugins = {};
var bundleData = {
graph: modulesGraph,
bundles: {}
};
Object.keys(entryPoints).forEach((moduleToBundle) => {
const info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph);
const excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot) => {
const allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude) => {
Object.keys(entryPoints).forEach(function (moduleToBundle) {
var info = entryPoints[moduleToBundle];
var rootNodes = [moduleToBundle].concat(info.include || []);
var allDependencies = visit(rootNodes, modulesGraph);
var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach(function (excludeRoot) {
var allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach(function (exclude) {
delete allDependencies[exclude];
});
});
const includedModules = sortedModules.filter((module) => {
var includedModules = sortedModules.filter(function (module) {
return allDependencies[module];
});
bundleData.bundles[moduleToBundle] = includedModules;
const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest);
var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
result = result.concat(res.files);
for (const pluginName in res.usedPlugins) {
for (var pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});
Object.keys(usedPlugins).forEach((pluginName) => {
const plugin = usedPlugins[pluginName];
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
const write = (filename, contents) => {
var write = function (filename, contents) {
result.push({
dest: filename,
sources: [{
@@ -128,16 +124,16 @@ function emitEntryPoints(modules, entryPoints) {
};
}
function extractStrings(destFiles) {
const parseDefineCall = (moduleMatch, depsMatch) => {
const module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(',');
deps = deps.map((dep) => {
var parseDefineCall = function (moduleMatch, depsMatch) {
var module = moduleMatch.replace(/^"|"$/g, '');
var deps = depsMatch.split(',');
deps = deps.map(function (dep) {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
let prefix = null;
let _path = null;
const pieces = dep.split('!');
var prefix = null;
var _path = null;
var pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
@@ -147,7 +143,7 @@ function extractStrings(destFiles) {
_path = pieces[0];
}
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
@@ -157,7 +153,7 @@ function extractStrings(destFiles) {
deps: deps
};
};
destFiles.forEach((destFile) => {
destFiles.forEach(function (destFile, index) {
if (!/\.js$/.test(destFile.dest)) {
return;
}
@@ -165,44 +161,44 @@ function extractStrings(destFiles) {
return;
}
// Do one pass to record the usage counts for each module id
const useCounts = {};
destFile.sources.forEach((source) => {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
var useCounts = {};
destFile.sources.forEach(function (source) {
var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}
const defineCall = parseDefineCall(matches[1], matches[2]);
var defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => {
defineCall.deps.forEach(function (dep) {
useCounts[dep] = (useCounts[dep] || 0) + 1;
});
});
const sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => {
var sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort(function (a, b) {
return useCounts[b] - useCounts[a];
});
const replacementMap = {};
sortedByUseModules.forEach((module, index) => {
var replacementMap = {};
sortedByUseModules.forEach(function (module, index) {
replacementMap[module] = index;
});
destFile.sources.forEach((source) => {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
const defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
destFile.sources.forEach(function (source) {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
var defineCall = parseDefineCall(moduleMatch, depsMatch);
return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
});
});
destFile.sources.unshift({
path: null,
contents: [
'(function() {',
`var __m = ${JSON.stringify(sortedByUseModules)};`,
`var __M = function(deps) {`,
` var result = [];`,
` for (var i = 0, len = deps.length; i < len; i++) {`,
` result[i] = __m[deps[i]];`,
` }`,
` return result;`,
`};`
"var __m = " + JSON.stringify(sortedByUseModules) + ";",
"var __M = function(deps) {",
" var result = [];",
" for (var i = 0, len = deps.length; i < len; i++) {",
" result[i] = __m[deps[i]];",
" }",
" return result;",
"};"
].join('\n')
});
destFile.sources.push({
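The prelude built by that unshift explains the rewrite performed a few lines earlier: module-id strings in every define call are replaced by indexes into a shared __m table, with __M mapping dependency indexes back to ids. Roughly (an illustrative before/after with made-up module names):

// before extraction:
define("vs/editor/a", ["vs/base/b", "vs/base/c"], function (b, c) { /* ... */ });
// after extraction, with __m = ["vs/editor/a", "vs/base/b", "vs/base/c"]:
define(__m[0/*vs/editor/a*/], __M([1/*vs/base/b*/, 2/*vs/base/c*/]), function (b, c) { /* ... */ });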
@@ -214,7 +210,7 @@ function extractStrings(destFiles) {
}
function removeDuplicateTSBoilerplate(destFiles) {
// Taken from typescript compiler => emitFiles
const BOILERPLATE = [
var BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ },
@@ -223,14 +219,14 @@ function removeDuplicateTSBoilerplate(destFiles) {
{ start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ },
];
destFiles.forEach((destFile) => {
const SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => {
const lines = source.contents.split(/\r\n|\n|\r/);
const newLines = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
destFiles.forEach(function (destFile) {
var SEEN_BOILERPLATE = [];
destFile.sources.forEach(function (source) {
var lines = source.contents.split(/\r\n|\n|\r/);
var newLines = [];
var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (IS_REMOVING_BOILERPLATE) {
newLines.push('');
if (END_BOILERPLATE.test(line)) {
@@ -238,8 +234,8 @@ function removeDuplicateTSBoilerplate(destFiles) {
}
}
else {
for (let j = 0; j < BOILERPLATE.length; j++) {
const boilerplate = BOILERPLATE[j];
for (var j = 0; j < BOILERPLATE.length; j++) {
var boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true;
@@ -267,45 +263,45 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
if (!dest) {
dest = entryPoint + '.js';
}
const mainResult = {
var mainResult = {
sources: [],
dest: dest
}, results = [mainResult];
const usedPlugins = {};
const getLoaderPlugin = (pluginName) => {
var usedPlugins = {};
var getLoaderPlugin = function (pluginName) {
if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports;
}
return usedPlugins[pluginName];
};
includedModules.forEach((c) => {
const bangIndex = c.indexOf('!');
includedModules.forEach(function (c) {
var bangIndex = c.indexOf('!');
if (bangIndex >= 0) {
const pluginName = c.substr(0, bangIndex);
const plugin = getLoaderPlugin(pluginName);
var pluginName = c.substr(0, bangIndex);
var plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return;
}
const module = modulesMap[c];
var module = modulesMap[c];
if (module.path === 'empty:') {
return;
}
const contents = readFileAndRemoveBOM(module.path);
var contents = readFileAndRemoveBOM(module.path);
if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
}
else {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
}
});
Object.keys(usedPlugins).forEach((pluginName) => {
const plugin = usedPlugins[pluginName];
Object.keys(usedPlugins).forEach(function (pluginName) {
var plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') {
const req = (() => {
var req = (function () {
throw new Error('no-no!');
});
req.toUrl = something => something;
const write = (filename, contents) => {
req.toUrl = function (something) { return something; };
var write = function (filename, contents) {
results.push({
dest: filename,
sources: [{
@@ -317,15 +313,15 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
plugin.writeFile(pluginName, entryPoint, req, write, {});
}
});
const toIFile = (path) => {
const contents = readFileAndRemoveBOM(path);
var toIFile = function (path) {
var contents = readFileAndRemoveBOM(path);
return {
path: path,
contents: contents
};
};
const toPrepend = (prepend || []).map(toIFile);
const toAppend = (append || []).map(toIFile);
var toPrepend = (prepend || []).map(toIFile);
var toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return {
files: results,
@@ -333,8 +329,8 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
};
}
function readFileAndRemoveBOM(path) {
const BOM_CHAR_CODE = 65279;
let contents = fs.readFileSync(path, 'utf8');
var BOM_CHAR_CODE = 65279;
var contents = fs.readFileSync(path, 'utf8');
// Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1);
@@ -342,15 +338,15 @@ function readFileAndRemoveBOM(path) {
return contents;
}
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
let result = '';
var result = '';
if (typeof plugin.write === 'function') {
const write = ((what) => {
var write = (function (what) {
result += what;
});
write.getEntryPoint = () => {
write.getEntryPoint = function () {
return entryPoint;
};
write.asModule = (moduleId, code) => {
write.asModule = function (moduleId, code) {
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code;
};
@@ -361,20 +357,20 @@ function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
contents: result
};
}
function emitNamedModule(moduleId, defineCallPosition, path, contents) {
function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
// `defineCallPosition` is the position in code: |define()
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|()
const parensOffset = contents.indexOf('(', defineCallOffset);
const insertStr = '"' + moduleId + '", ';
var parensOffset = contents.indexOf('(', defineCallOffset);
var insertStr = '"' + moduleId + '", ';
return {
path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
};
}
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return {
path: path,
contents: contents + '\n;\n' + strDefine
@@ -387,8 +383,7 @@ function positionToOffset(str, desiredLine, desiredCol) {
if (desiredLine === 1) {
return desiredCol - 1;
}
let line = 1;
let lastNewLineOffset = -1;
var line = 1, lastNewLineOffset = -1;
do {
if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1;
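positionToOffset converts a 1-based (line, column) pair into a character offset by scanning for newlines. A quick worked example (assuming the function as shown):

// In 'ab\ncd', line 2 starts at offset 3:
positionToOffset('ab\ncd', 1, 1); // 0
positionToOffset('ab\ncd', 2, 1); // 3
positionToOffset('ab\ncd', 2, 2); // 4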
@@ -402,15 +397,14 @@ function positionToOffset(str, desiredLine, desiredCol) {
* Return a set of reachable nodes in `graph` starting from `rootNodes`
*/
function visit(rootNodes, graph) {
const result = {};
const queue = rootNodes;
rootNodes.forEach((node) => {
var result = {}, queue = rootNodes;
rootNodes.forEach(function (node) {
result[node] = true;
});
while (queue.length > 0) {
const el = queue.shift();
const myEdges = graph[el] || [];
myEdges.forEach((toNode) => {
var el = queue.shift();
var myEdges = graph[el] || [];
myEdges.forEach(function (toNode) {
if (!result[toNode]) {
result[toNode] = true;
queue.push(toNode);
@@ -423,11 +417,11 @@ function visit(rootNodes, graph) {
* Perform a topological sort on `graph`
*/
function topologicalSort(graph) {
const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach((fromNode) => {
var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach(function (fromNode) {
allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach((toNode) => {
graph[fromNode].forEach(function (toNode) {
allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || [];
@@ -435,8 +429,8 @@ function topologicalSort(graph) {
});
});
// https://en.wikipedia.org/wiki/Topological_sorting
const S = [], L = [];
Object.keys(allNodes).forEach((node) => {
var S = [], L = [];
Object.keys(allNodes).forEach(function (node) {
if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node];
S.push(node);
@@ -445,10 +439,10 @@ function topologicalSort(graph) {
while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs
S.sort();
const n = S.shift();
var n = S.shift();
L.push(n);
const myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m) => {
var myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach(function (m) {
outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m];

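topologicalSort here is Kahn's algorithm run over the dependency graph, with S.sort() making the output deterministic for identical inputs. Example (assuming the function as shown, where graph maps a module to its dependencies):

// 'main' depends on 'util', which depends on 'base':
topologicalSort({ main: ['util'], util: ['base'], base: [] });
// -> ['base', 'util', 'main']  (dependencies come first)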
@@ -3,9 +3,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as fs from 'fs';
import * as path from 'path';
import * as vm from 'vm';
import fs = require('fs');
import path = require('path');
import vm = require('vm');

interface IPosition {
line: number;
@@ -46,7 +46,7 @@ export interface IEntryPoint {
name: string;
include?: string[];
exclude?: string[];
prepend?: string[];
prepend: string[];
append?: string[];
dest?: string;
}
@@ -64,7 +64,7 @@ interface INodeSet {
}

export interface IFile {
path: string | null;
path: string;
contents: string;
}

@@ -97,13 +97,13 @@ export interface ILoaderConfig {
/**
* Bundle `entryPoints` given config `config`.
*/
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult | null) => void): void {
const entryPointsMap: IEntryPointMap = {};
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
let entryPointsMap: IEntryPointMap = {};
entryPoints.forEach((module: IEntryPoint) => {
entryPointsMap[module.name] = module;
});

const allMentionedModulesMap: { [modules: string]: boolean; } = {};
let allMentionedModulesMap: { [modules: string]: boolean; } = {};
entryPoints.forEach((module: IEntryPoint) => {
allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) {
@@ -115,32 +115,28 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
});


const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} };
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports);

const loader: any = loaderModule.exports;
var loader: any = loaderModule.exports;
config.isBuild = true;
config.paths = config.paths || {};
if (!config.paths['vs/nls']) {
config.paths['vs/nls'] = 'out-build/vs/nls.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/css'] = 'out-build/vs/css.build';
loader.config(config);

loader(['require'], (localRequire: any) => {
const resolvePath = (path: string) => {
const r = localRequire.toUrl(path);
loader(['require'], (localRequire) => {
let resolvePath = (path: string) => {
let r = localRequire.toUrl(path);
if (!/\.js/.test(r)) {
return r + '.js';
}
return r;
};
for (const moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId];
for (let moduleId in entryPointsMap) {
let entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath);
}
@@ -151,76 +147,76 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
});

loader(Object.keys(allMentionedModulesMap), () => {
const modules = <IBuildModuleInfo[]>loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources();
let modules = <IBuildModuleInfo[]>loader.getBuildInfo();
let partialResult = emitEntryPoints(modules, entryPointsMap);
let cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, {
files: partialResult.files,
cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData
});
}, (err: any) => callback(err, null));
}, (err) => callback(err, null));
}

function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
const modulesMap: IBuildModuleInfoMap = {};
let modulesMap: IBuildModuleInfoMap = {};
modules.forEach((m: IBuildModuleInfo) => {
modulesMap[m.id] = m;
});

const modulesGraph: IGraph = {};
let modulesGraph: IGraph = {};
modules.forEach((m: IBuildModuleInfo) => {
modulesGraph[m.id] = m.dependencies;
});

const sortedModules = topologicalSort(modulesGraph);
let sortedModules = topologicalSort(modulesGraph);

let result: IConcatFile[] = [];
const usedPlugins: IPluginMap = {};
const bundleData: IBundleData = {
let usedPlugins: IPluginMap = {};
let bundleData: IBundleData = {
graph: modulesGraph,
bundles: {}
};

Object.keys(entryPoints).forEach((moduleToBundle: string) => {
const info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph);
const excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
let info = entryPoints[moduleToBundle];
let rootNodes = [moduleToBundle].concat(info.include || []);
let allDependencies = visit(rootNodes, modulesGraph);
let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);

excludes.forEach((excludeRoot: string) => {
const allExcludes = visit([excludeRoot], modulesGraph);
let allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude: string) => {
delete allDependencies[exclude];
});
});

const includedModules = sortedModules.filter((module: string) => {
let includedModules = sortedModules.filter((module: string) => {
return allDependencies[module];
});

bundleData.bundles[moduleToBundle] = includedModules;

const res = emitEntryPoint(
let res = emitEntryPoint(
modulesMap,
modulesGraph,
moduleToBundle,
includedModules,
info.prepend || [],
info.append || [],
info.prepend,
info.append,
info.dest
);

result = result.concat(res.files);
for (const pluginName in res.usedPlugins) {
for (let pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
}
});

Object.keys(usedPlugins).forEach((pluginName: string) => {
const plugin = usedPlugins[pluginName];
let plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') {
const write = (filename: string, contents: string) => {
let write = (filename: string, contents: string) => {
result.push({
dest: filename,
sources: [{
@@ -241,16 +237,16 @@ function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMa
}

function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
const parseDefineCall = (moduleMatch: string, depsMatch: string) => {
const module = moduleMatch.replace(/^"|"$/g, '');
let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
let module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(',');
deps = deps.map((dep) => {
dep = dep.trim();
dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, '');
let prefix: string | null = null;
let _path: string | null = null;
const pieces = dep.split('!');
let prefix: string = null;
let _path: string = null;
let pieces = dep.split('!');
if (pieces.length > 1) {
prefix = pieces[0] + '!';
_path = pieces[1];
@@ -260,7 +256,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
}

if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
let res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res;
}
return prefix + _path;
@@ -271,7 +267,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
};
};

destFiles.forEach((destFile) => {
destFiles.forEach((destFile, index) => {
if (!/\.js$/.test(destFile.dest)) {
return;
}
@@ -280,33 +276,33 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
}

// Do one pass to record the usage counts for each module id
const useCounts: { [moduleId: string]: number; } = {};
let useCounts: { [moduleId: string]: number; } = {};
destFile.sources.forEach((source) => {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) {
return;
}

const defineCall = parseDefineCall(matches[1], matches[2]);
let defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
||||
defineCall.deps.forEach((dep) => {
|
||||
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
||||
});
|
||||
});
|
||||
|
||||
const sortedByUseModules = Object.keys(useCounts);
|
||||
let sortedByUseModules = Object.keys(useCounts);
|
||||
sortedByUseModules.sort((a, b) => {
|
||||
return useCounts[b] - useCounts[a];
|
||||
});
|
||||
|
||||
const replacementMap: { [moduleId: string]: number; } = {};
|
||||
let replacementMap: { [moduleId: string]: number; } = {};
|
||||
sortedByUseModules.forEach((module, index) => {
|
||||
replacementMap[module] = index;
|
||||
});
|
||||
|
||||
destFile.sources.forEach((source) => {
|
||||
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
|
||||
const defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||
let defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
|
||||
});
|
||||
});
|
||||
@@ -336,7 +332,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
|
||||
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
// Taken from typescript compiler => emitFiles
|
||||
const BOILERPLATE = [
|
||||
let BOILERPLATE = [
|
||||
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
||||
{ start: /^var __assign/, end: /^};$/ },
|
||||
{ start: /^var __decorate/, end: /^};$/ },
|
||||
@@ -347,22 +343,22 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
];
|
||||
|
||||
destFiles.forEach((destFile) => {
|
||||
const SEEN_BOILERPLATE: boolean[] = [];
|
||||
let SEEN_BOILERPLATE = [];
|
||||
destFile.sources.forEach((source) => {
|
||||
const lines = source.contents.split(/\r\n|\n|\r/);
|
||||
const newLines: string[] = [];
|
||||
let lines = source.contents.split(/\r\n|\n|\r/);
|
||||
let newLines: string[] = [];
|
||||
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
let line = lines[i];
|
||||
if (IS_REMOVING_BOILERPLATE) {
|
||||
newLines.push('');
|
||||
if (END_BOILERPLATE!.test(line)) {
|
||||
if (END_BOILERPLATE.test(line)) {
|
||||
IS_REMOVING_BOILERPLATE = false;
|
||||
}
|
||||
} else {
|
||||
for (let j = 0; j < BOILERPLATE.length; j++) {
|
||||
const boilerplate = BOILERPLATE[j];
|
||||
let boilerplate = BOILERPLATE[j];
|
||||
if (boilerplate.start.test(line)) {
|
||||
if (SEEN_BOILERPLATE[j]) {
|
||||
IS_REMOVING_BOILERPLATE = true;
|
||||
@@ -402,19 +398,19 @@ function emitEntryPoint(
|
||||
includedModules: string[],
|
||||
prepend: string[],
|
||||
append: string[],
|
||||
dest: string | undefined
|
||||
dest: string
|
||||
): IEmitEntryPointResult {
|
||||
if (!dest) {
|
||||
dest = entryPoint + '.js';
|
||||
}
|
||||
const mainResult: IConcatFile = {
|
||||
let mainResult: IConcatFile = {
|
||||
sources: [],
|
||||
dest: dest
|
||||
},
|
||||
results: IConcatFile[] = [mainResult];
|
||||
|
||||
const usedPlugins: IPluginMap = {};
|
||||
const getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
|
||||
let usedPlugins: IPluginMap = {};
|
||||
let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
|
||||
if (!usedPlugins[pluginName]) {
|
||||
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
||||
}
|
||||
@@ -422,39 +418,39 @@ function emitEntryPoint(
|
||||
};
|
||||
|
||||
includedModules.forEach((c: string) => {
|
||||
const bangIndex = c.indexOf('!');
|
||||
let bangIndex = c.indexOf('!');
|
||||
|
||||
if (bangIndex >= 0) {
|
||||
const pluginName = c.substr(0, bangIndex);
|
||||
const plugin = getLoaderPlugin(pluginName);
|
||||
let pluginName = c.substr(0, bangIndex);
|
||||
let plugin = getLoaderPlugin(pluginName);
|
||||
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
||||
return;
|
||||
}
|
||||
|
||||
const module = modulesMap[c];
|
||||
let module = modulesMap[c];
|
||||
|
||||
if (module.path === 'empty:') {
|
||||
return;
|
||||
}
|
||||
|
||||
const contents = readFileAndRemoveBOM(module.path);
|
||||
let contents = readFileAndRemoveBOM(module.path);
|
||||
|
||||
if (module.shim) {
|
||||
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
||||
} else {
|
||||
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
|
||||
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
|
||||
}
|
||||
});
|
||||
|
||||
Object.keys(usedPlugins).forEach((pluginName: string) => {
|
||||
const plugin = usedPlugins[pluginName];
|
||||
let plugin = usedPlugins[pluginName];
|
||||
if (typeof plugin.writeFile === 'function') {
|
||||
const req: ILoaderPluginReqFunc = <any>(() => {
|
||||
let req: ILoaderPluginReqFunc = <any>(() => {
|
||||
throw new Error('no-no!');
|
||||
});
|
||||
req.toUrl = something => something;
|
||||
|
||||
const write = (filename: string, contents: string) => {
|
||||
let write = (filename: string, contents: string) => {
|
||||
results.push({
|
||||
dest: filename,
|
||||
sources: [{
|
||||
@@ -467,16 +463,16 @@ function emitEntryPoint(
|
||||
}
|
||||
});
|
||||
|
||||
const toIFile = (path: string): IFile => {
|
||||
const contents = readFileAndRemoveBOM(path);
|
||||
let toIFile = (path): IFile => {
|
||||
let contents = readFileAndRemoveBOM(path);
|
||||
return {
|
||||
path: path,
|
||||
contents: contents
|
||||
};
|
||||
};
|
||||
|
||||
const toPrepend = (prepend || []).map(toIFile);
|
||||
const toAppend = (append || []).map(toIFile);
|
||||
let toPrepend = (prepend || []).map(toIFile);
|
||||
let toAppend = (append || []).map(toIFile);
|
||||
|
||||
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
||||
|
||||
@@ -487,8 +483,8 @@ function emitEntryPoint(
|
||||
}
|
||||
|
||||
function readFileAndRemoveBOM(path: string): string {
|
||||
const BOM_CHAR_CODE = 65279;
|
||||
let contents = fs.readFileSync(path, 'utf8');
|
||||
var BOM_CHAR_CODE = 65279;
|
||||
var contents = fs.readFileSync(path, 'utf8');
|
||||
// Remove BOM
|
||||
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
||||
contents = contents.substring(1);
|
||||
@@ -499,7 +495,7 @@ function readFileAndRemoveBOM(path: string): string {
|
||||
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
|
||||
let result = '';
|
||||
if (typeof plugin.write === 'function') {
|
||||
const write: ILoaderPluginWriteFunc = <any>((what: string) => {
|
||||
let write: ILoaderPluginWriteFunc = <any>((what) => {
|
||||
result += what;
|
||||
});
|
||||
write.getEntryPoint = () => {
|
||||
@@ -517,15 +513,15 @@ function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: strin
|
||||
};
|
||||
}
|
||||
|
||||
function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path: string, contents: string): IFile {
|
||||
function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
|
||||
|
||||
// `defineCallPosition` is the position in code: |define()
|
||||
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||
let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||
|
||||
// `parensOffset` is the position in code: define|()
|
||||
const parensOffset = contents.indexOf('(', defineCallOffset);
|
||||
let parensOffset = contents.indexOf('(', defineCallOffset);
|
||||
|
||||
const insertStr = '"' + moduleId + '", ';
|
||||
let insertStr = '"' + moduleId + '", ';
|
||||
|
||||
return {
|
||||
path: path,
|
||||
@@ -534,8 +530,8 @@ function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path:
|
||||
}
|
||||
|
||||
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
|
||||
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||
let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||
let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||
return {
|
||||
path: path,
|
||||
contents: contents + '\n;\n' + strDefine
|
||||
@@ -550,8 +546,8 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
|
||||
return desiredCol - 1;
|
||||
}
|
||||
|
||||
let line = 1;
|
||||
let lastNewLineOffset = -1;
|
||||
let line = 1,
|
||||
lastNewLineOffset = -1;
|
||||
|
||||
do {
|
||||
if (desiredLine === line) {
|
||||
@@ -569,16 +565,16 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
|
||||
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
||||
*/
|
||||
function visit(rootNodes: string[], graph: IGraph): INodeSet {
|
||||
const result: INodeSet = {};
|
||||
const queue = rootNodes;
|
||||
let result: INodeSet = {},
|
||||
queue = rootNodes;
|
||||
|
||||
rootNodes.forEach((node) => {
|
||||
result[node] = true;
|
||||
});
|
||||
|
||||
while (queue.length > 0) {
|
||||
const el = queue.shift();
|
||||
const myEdges = graph[el!] || [];
|
||||
let el = queue.shift();
|
||||
let myEdges = graph[el] || [];
|
||||
myEdges.forEach((toNode) => {
|
||||
if (!result[toNode]) {
|
||||
result[toNode] = true;
|
||||
@@ -595,7 +591,7 @@ function visit(rootNodes: string[], graph: IGraph): INodeSet {
|
||||
*/
|
||||
function topologicalSort(graph: IGraph): string[] {
|
||||
|
||||
const allNodes: INodeSet = {},
|
||||
let allNodes: INodeSet = {},
|
||||
outgoingEdgeCount: { [node: string]: number; } = {},
|
||||
inverseEdges: IGraph = {};
|
||||
|
||||
@@ -613,7 +609,7 @@ function topologicalSort(graph: IGraph): string[] {
|
||||
});
|
||||
|
||||
// https://en.wikipedia.org/wiki/Topological_sorting
|
||||
const S: string[] = [],
|
||||
let S: string[] = [],
|
||||
L: string[] = [];
|
||||
|
||||
Object.keys(allNodes).forEach((node: string) => {
|
||||
@@ -627,10 +623,10 @@ function topologicalSort(graph: IGraph): string[] {
|
||||
// Ensure the exact same order all the time with the same inputs
|
||||
S.sort();
|
||||
|
||||
const n: string = S.shift()!;
|
||||
let n: string = S.shift();
|
||||
L.push(n);
|
||||
|
||||
const myInverseEdges = inverseEdges[n] || [];
|
||||
let myInverseEdges = inverseEdges[n] || [];
|
||||
myInverseEdges.forEach((m: string) => {
|
||||
outgoingEdgeCount[m]--;
|
||||
if (outgoingEdgeCount[m] === 0) {
|
||||
|
||||
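For reference, the `topologicalSort` touched above is a Kahn-style sort over the module graph: count each node's outgoing (dependency) edges, repeatedly emit nodes with none outstanding, and decrement the counts of their dependents via the inverse edges; the `S.sort()` keeps output deterministic for identical inputs. A minimal standalone sketch of the same idea (types simplified; `Graph` here is just a name-to-dependencies map, not the build's actual `IGraph`):

    type Graph = { [node: string]: string[]; };

    function kahnSort(graph: Graph): string[] {
        const outgoing: { [node: string]: number; } = {};
        const inverse: Graph = {};
        for (const from of Object.keys(graph)) {
            const deps = graph[from] || [];
            outgoing[from] = (outgoing[from] || 0) + deps.length;
            for (const to of deps) {
                outgoing[to] = outgoing[to] || 0;
                (inverse[to] = inverse[to] || []).push(from); // who depends on `to`
            }
        }
        // Start from nodes that depend on nothing.
        const S = Object.keys(outgoing).filter(n => outgoing[n] === 0);
        const L: string[] = [];
        while (S.length > 0) {
            S.sort(); // deterministic order for identical inputs
            const n = S.shift()!;
            L.push(n);
            for (const m of inverse[n] || []) {
                if (--outgoing[m] === 0) {
                    S.push(m); // all of m's dependencies have been emitted
                }
            }
        }
        return L; // dependencies come before their dependents
    }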
@@ -4,53 +4,44 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const gulp = require("gulp");
const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb");
const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
const util = require("./util");
const util2 = require("gulp-util");
const watch = require('./watch');
const reporter = reporter_1.createReporter();
var gulp = require("gulp");
var tsb = require("gulp-tsb");
var es = require("event-stream");
var watch = require('./watch');
var nls = require("./nls");
var util = require("./util");
var reporter_1 = require("./reporter");
var path = require("path");
var bom = require("gulp-bom");
var sourcemaps = require("gulp-sourcemaps");
var _ = require("underscore");
var monacodts = require("../monaco/api");
var fs = require("fs");
var reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
var rootDir = path.join(__dirname, "../../" + src);
var options = require("../../" + src + "/tsconfig.json").compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
}
function createCompile(src, build, emitError) {
const opts = _.clone(getTypeScriptCompilerOptions(src));
var opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
return function (token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
const input = es.through();
const output = input
var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
var input = es.through();
var output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(utf8Filter.restore)
@@ -66,122 +57,91 @@ function createCompile(src, build, emitError) {
sourceRoot: opts.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
.pipe(reporter.end(emitError));
return es.duplex(input, output);
};
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
var compile = createCompile(src, build, true);
var srcPipe = es.merge(gulp.src(src + "/**", { base: "" + src }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return srcPipe
.pipe(generator.stream)
.pipe(compile())
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
};
}
exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true);
generator.execute();
var compile = createCompile('src', build);
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
var watchSrc = watch('src/**', { base: 'src' });
// Do not write .d.ts files to disk, as they are not needed there.
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}
exports.watchTask = watchTask;
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
class MonacoGenerator {
constructor(isWatch) {
this._executeSoonTimer = null;
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId, filePath) => {
if (!this._isWatch) {
return;
}
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
readFileSync(moduleId, filePath) {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
function monacodtsTask(out, isWatch) {
var basePath = path.resolve(process.cwd(), out);
var neededFiles = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});
var inputFiles = {};
for (var filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}
_executeSoon() {
if (this._executeSoonTimer !== null) {
clearTimeout(this._executeSoonTimer);
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}
dispose() {
this._watchers.forEach(watcher => watcher.close());
}
_run() {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
_log(message, ...rest) {
util2.log(util2.colors.cyan('[monaco.d.ts]'), message, ...rest);
}
execute() {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
var setInputFile = function (filePath, contents) {
if (inputFiles[filePath] === contents) {
// no change
return;
}
if (result.isTheSame) {
return;
inputFiles[filePath] = contents;
var neededInputFilesCount = Object.keys(neededFiles).length;
var availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
fs.writeFileSync(result.filePath, result.content);
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
if (!this._isWatch) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
};
var run = function () {
var result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
fs.writeFileSync(result.filePath, result.content);
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};
var resultStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
}
resultStream = es.through(function (data) {
var filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
}

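The new `getTypeScriptCompilerOptions` above honors a tsconfig `extends` reference one level deep by layering the local `compilerOptions` over the extended file's. A reduced sketch of that merge, with hypothetical file contents inlined instead of `require` calls:

    // Hypothetical contents of a base and a leaf tsconfig.
    const base = { compilerOptions: { strict: true, target: 'es5' } };
    const leaf = { extends: './tsconfig.base.json', compilerOptions: { target: 'es6' } };

    // Later arguments win, so local options override the extended file's.
    const options = Object.assign({}, base.compilerOptions, leaf.compilerOptions);
    // => { strict: true, target: 'es6' }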
@@ -5,39 +5,31 @@

'use strict';

import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp';
import * as tsb from 'gulp-tsb';
import * as es from 'event-stream';
const watch = require('./watch');
import * as nls from './nls';
import * as util from './util';
import { createReporter } from './reporter';
import * as path from 'path';
import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
import * as util2 from 'gulp-util';
const watch = require('./watch');
import * as fs from 'fs';

const reporter = createReporter();

function getTypeScriptCompilerOptions(src: string) {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
const options = require(`../../${src}/tsconfig.json`).compilerOptions;
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false;
}
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options;
@@ -48,7 +40,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;

const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
const ts = tsb.create(opts, null, null, err => reporter(err.toString()));

return function (token?: util.ICancellationToken) {

@@ -73,19 +65,12 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
sourceRoot: opts.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
.pipe(reporter.end(emitError));

return es.duplex(input, output);
};
}

const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];

export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {

return function () {
@@ -93,18 +78,18 @@ export function compileTask(src: string, out: string, build: boolean): () => Nod

const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);

let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
}
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));

return srcPipe
.pipe(generator.stream)
.pipe(compile())
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
};
}

@@ -115,114 +100,80 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt

const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
gulp.src('node_modules/typescript/lib/lib.d.ts'),
);
const watchSrc = watch('src/**', { base: 'src' });

let generator = new MonacoGenerator(true);
generator.execute();
// Do not write .d.ts files to disk, as they are not needed there.
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));

return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out));
.pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
};
}

const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {

class MonacoGenerator {
private readonly _isWatch: boolean;
public readonly stream: NodeJS.ReadWriteStream;
const basePath = path.resolve(process.cwd(), out);

private readonly _watchers: fs.FSWatcher[];
private readonly _watchedFiles: { [filePath: string]: boolean; };
private readonly _fsProvider: monacodts.FSProvider;
private readonly _declarationResolver: monacodts.DeclarationResolver;
const neededFiles: { [file: string]: boolean; } = {};
monacodts.getFilesToWatch(out).forEach(function (filePath) {
filePath = path.normalize(filePath);
neededFiles[filePath] = true;
});

constructor(isWatch: boolean) {
this._isWatch = isWatch;
this.stream = es.through();
this._watchers = [];
this._watchedFiles = {};
let onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
return;
}
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;

const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
public readFileSync(moduleId: string, filePath: string): Buffer {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);

if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
const inputFiles: { [file: string]: string; } = {};
for (let filePath in neededFiles) {
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
// This file is needed from source => simply read it now
inputFiles[filePath] = fs.readFileSync(filePath).toString();
}
}

private _executeSoonTimer: NodeJS.Timer | null = null;
private _executeSoon(): void {
if (this._executeSoonTimer !== null) {
clearTimeout(this._executeSoonTimer);
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}

public dispose(): void {
this._watchers.forEach(watcher => watcher.close());
}

private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}

private _log(message: any, ...rest: any[]): void {
util2.log(util2.colors.cyan('[monaco.d.ts]'), message, ...rest);
}

public execute(): void {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
const setInputFile = (filePath: string, contents: string) => {
if (inputFiles[filePath] === contents) {
// no change
return;
}
if (result.isTheSame) {
return;
inputFiles[filePath] = contents;
const neededInputFilesCount = Object.keys(neededFiles).length;
const availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
}
};

fs.writeFileSync(result.filePath, result.content);
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
if (!this._isWatch) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
const run = () => {
const result = monacodts.run(out, inputFiles);
if (!result.isTheSame) {
if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
} else {
fs.writeFileSync(result.filePath, result.content);
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
}
};

let resultStream: NodeJS.ReadWriteStream;

if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
}

resultStream = es.through(function (data) {
const filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});

return resultStream;
}

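The `_executeSoon` pattern in `MonacoGenerator` above is a plain trailing-edge debounce: every file-change event cancels the pending timer and starts a new one, so a burst of changes triggers a single regeneration. A minimal sketch of the same idea, assuming nothing beyond Node's timers:

    function debounce(fn: () => void, delayMs: number): () => void {
        let timer: ReturnType<typeof setTimeout> | null = null;
        return () => {
            if (timer !== null) {
                clearTimeout(timer); // a newer event supersedes the pending one
            }
            timer = setTimeout(() => {
                timer = null;
                fn();
            }, delayMs);
        };
    }

    // e.g. const regenerateSoon = debounce(() => generator.execute(), 20);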
@@ -11,7 +11,6 @@ const root = path.dirname(path.dirname(__dirname));

function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];

return target;
@@ -20,7 +19,6 @@ function getElectronVersion() {
module.exports.getElectronVersion = getElectronVersion;

// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');

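`getElectronVersion` above just scrapes the `target` line out of `.yarnrc` with a multiline regex. Against a hypothetical `.yarnrc` (the version string here is made up for illustration), the extraction reduces to:

    const yarnrc = 'disturl "https://atom.io/download/electron"\ntarget "4.2.10"\nruntime "electron"';
    const match = /^target "(.*)"$/m.exec(yarnrc);
    const electronVersion = match ? match[1] : undefined; // '4.2.10'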
@@ -4,317 +4,116 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const fs = require("fs");
const glob = require("glob");
const gulp = require("gulp");
const path = require("path");
const File = require("vinyl");
const vsce = require("vsce");
const stats_1 = require("./stats");
const util2 = require("./util");
const remote = require("gulp-remote-src");
const vzip = require('gulp-vinyl-zip');
const filter = require("gulp-filter");
const rename = require("gulp-rename");
const util = require('gulp-util');
const buffer = require('gulp-buffer');
const json = require("gulp-json-editor");
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const root = path.resolve(path.join(__dirname, '..', '..'));
// {{SQL CARBON EDIT}}
const _ = require("underscore");
const vfs = require("vinyl-fs");
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
exports.packageBuiltInExtensions = packageBuiltInExtensions;
function packageExtensionTask(extensionName, platform, arch) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
}
else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
exports.packageExtensionTask = packageExtensionTask;
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath, sourceMappingURLBase) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
}
else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
const result = es.through();
const packagedDependencies = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json files next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', '/dist/');
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err, stats) => {
util.log(`Bundled extension: ${util.colors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
var es = require("event-stream");
var assign = require("object-assign");
var remote = require("gulp-remote-src");
var flatmap = require('gulp-flatmap');
var vzip = require('gulp-vinyl-zip');
var filter = require('gulp-filter');
var rename = require('gulp-rename');
var util = require('gulp-util');
var buffer = require('gulp-buffer');
var json = require('gulp-json-editor');
var fs = require("fs");
var path = require("path");
var vsce = require("vsce");
var File = require("vinyl");
function fromLocal(extensionPath) {
var result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
.then(function (fileNames) {
var files = fileNames
.map(function (fileName) { return path.join(extensionPath, fileName); })
.map(function (filePath) { return new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
}); });
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
.catch(function (err) { return result.emit('error', err); });
return result;
}
const baseHeaders = {
exports.fromLocal = fromLocal;
function error(err) {
var result = es.through();
setTimeout(function () { return result.emit('error', err); });
return result;
}
var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};
function fromMarketplace(extensionName, version, metadata) {
const [publisher, name] = extensionName.split('.');
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
const options = {
base: url,
function fromMarketplace(extensionName, version) {
var filterType = 7;
var value = extensionName;
var criterium = { filterType: filterType, value: value };
var criteria = [criterium];
var pageNumber = 1;
var pageSize = 1;
var sortBy = 0;
var sortOrder = 0;
var flags = 0x1 | 0x2 | 0x80;
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
var headers = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
var options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: {
method: 'POST',
gzip: true,
headers: baseHeaders
headers: headers,
body: body
}
};
const packageJsonFilter = filter('package.json', { restore: true });
return remote('', options)
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
return remote('/extensionquery', options)
.pipe(flatmap(function (stream, f) {
var rawResult = f.contents.toString('utf8');
var result = JSON.parse(rawResult);
var extension = result.results[0].extensions[0];
if (!extension) {
return error("No such extension: " + extension);
}
var metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
if (!extensionVersion) {
return error("No such extension version: " + extensionName + " @ " + version);
}
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
if (!asset) {
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
}
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
var options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(function (stream) {
var packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
}
exports.fromMarketplace = fromMarketplace;
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent',
'import',
'profiler',
'admin-pack',
'big-data-cluster',
'dacpac'
];
var azureExtensions = ['azurecore', 'mssql'];
const builtInExtensions = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders) {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
}
else {
const fn = streamProviders.shift();
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
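`sequence` above chains stream factories end-to-end: each factory only runs after the previous stream emits 'end', and every stream is piped into one shared output with `{ end: false }` so the result stays open until the queue drains. A hedged usage sketch (the extension paths and destination below are made up for illustration):

    // Each entry is a lazy () => Stream, so the work does not start in parallel.
    const packaged = sequence([
        () => fromLocal('extensions/agent'),
        () => fromLocal('extensions/profiler'),
    ]);
    packaged.pipe(gulp.dest('../packaged-extensions'));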
function packageExtensionsStream(optsIn) {
const opts = optsIn || {};
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d) => path.relative(root, d.path)).map((d) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util2.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util2.cleanNodeModule('typescript', ['**/**'], undefined));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies,])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
}
exports.packageExtensionsStream = packageExtensionsStream;

@@ -4,221 +4,22 @@
 *--------------------------------------------------------------------------------------------*/

import * as es from 'event-stream';
import * as fs from 'fs';
import * as glob from 'glob';
import * as gulp from 'gulp';
import * as path from 'path';
import { Stream } from 'stream';
import * as File from 'vinyl';
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import assign = require('object-assign');
import remote = require('gulp-remote-src');
const flatmap = require('gulp-flatmap');
const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter');
import rename = require('gulp-rename');
const filter = require('gulp-filter');
const rename = require('gulp-rename');
const util = require('gulp-util');
const buffer = require('gulp-buffer');
import json = require('gulp-json-editor');
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const json = require('gulp-json-editor');
import * as fs from 'fs';
import * as path from 'path';
import * as vsce from 'vsce';
import * as File from 'vinyl';

const root = path.resolve(path.join(__dirname, '..', '..'));

// {{SQL CARBON EDIT}}
import * as _ from 'underscore';
import * as vfs from 'vinyl-fs';
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);

export function packageBuiltInExtensions() {
	const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
		.map(manifestPath => {
			const extensionPath = path.dirname(path.join(root, manifestPath));
			const extensionName = path.basename(extensionPath);
			return { name: extensionName, path: extensionPath };
		})
		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
	sqlBuiltInLocalExtensionDescriptions.forEach(element => {
		const packagePath = path.join(path.dirname(root), element.name + '.vsix');
		console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
		vsce.createVSIX({
			cwd: element.path,
			packagePath: packagePath,
			useYarn: true
		});
	});
}

export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
	var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
	if (platform === 'darwin') {
		destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
	} else {
		destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
	}

	platform = platform || process.platform;

	return () => {
		const root = path.resolve(path.join(__dirname, '../..'));
		const localExtensionDescriptions = glob.sync('extensions/*/package.json')
			.map(manifestPath => {
				const extensionPath = path.dirname(path.join(root, manifestPath));
				const extensionName = path.basename(extensionPath);
				return { name: extensionName, path: extensionPath };
			})
			.filter(({ name }) => extensionName === name);

		const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
			return fromLocal(extension.path);
		}));

		let result = localExtensions
			.pipe(util2.skipDirectories())
			.pipe(util2.fixWin32DirectoryPermissions())
			.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));

		return result.pipe(vfs.dest(destination));
	};
}
// {{SQL CARBON EDIT}} - End

function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
	const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
	if (fs.existsSync(webpackFilename)) {
		return fromLocalWebpack(extensionPath, sourceMappingURLBase);
	} else {
		return fromLocalNormal(extensionPath);
	}
}

function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
	const result = es.through();

	const packagedDependencies: string[] = [];
	const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
	if (packageJsonConfig.dependencies) {
		const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
		for (const key in webpackRootConfig.externals) {
			if (key in packageJsonConfig.dependencies) {
				packagedDependencies.push(key);
			}
		}
	}
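To make the intent of the loop above concrete, a hypothetical example (names not from this repository):

	// package.json:             { "dependencies": { "vscode-nls": "*", "lodash": "*" } }
	// extension.webpack.config: externals: { "vscode-nls": "commonjs vscode-nls" }
	// → packagedDependencies === ['vscode-nls']; lodash is assumed to be bundled by
	//   webpack instead, and only the externals are handed to vsce.listFiles below.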

	vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
		const files = fileNames
			.map(fileName => path.join(extensionPath, fileName))
			.map(filePath => new File({
				path: filePath,
				stat: fs.statSync(filePath),
				base: extensionPath,
				contents: fs.createReadStream(filePath) as any
			}));

		const filesStream = es.readArray(files);

		// check for webpack configuration files, then invoke webpack
		// and merge its output with the files stream. also rewrite the package.json
		// file to a new entry point
		const webpackConfigLocations = (<string[]>glob.sync(
			path.join(extensionPath, '/**/extension.webpack.config.js'),
			{ ignore: ['**/node_modules'] }
		));

		const packageJsonFilter = filter(f => {
			if (path.basename(f.path) === 'package.json') {
				// only modify package.json's next to the webpack file.
				// to be safe, use existsSync instead of path comparison.
				return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
			}
			return false;
		}, { restore: true });

		const patchFilesStream = filesStream
			.pipe(packageJsonFilter)
			.pipe(buffer())
			.pipe(json((data: any) => {
				if (data.main) {
					// hardcoded entry point directory!
					data.main = data.main.replace('/out/', '/dist/');
				}
				return data;
			}))
			.pipe(packageJsonFilter.restore);


		const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {

			const webpackDone = (err: any, stats: any) => {
				util.log(`Bundled extension: ${util.colors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
				if (err) {
					result.emit('error', err);
				}
				const { compilation } = stats;
				if (compilation.errors.length > 0) {
					result.emit('error', compilation.errors.join('\n'));
				}
				if (compilation.warnings.length > 0) {
					result.emit('error', compilation.warnings.join('\n'));
				}
			};

			const webpackConfig = {
				...require(webpackConfigPath),
				...{ mode: 'production' }
			};
			const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);

			return webpackGulp(webpackConfig, webpack, webpackDone)
				.pipe(es.through(function (data) {
					data.stat = data.stat || {};
					data.base = extensionPath;
					this.emit('data', data);
				}))
				.pipe(es.through(function (data: File) {
					// source map handling:
					// * rewrite sourceMappingURL
					// * save to disk so that upload-task picks this up
					if (sourceMappingURLBase) {
						const contents = (<Buffer>data.contents).toString('utf8');
						data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
							return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
						}), 'utf8');

						if (/\.js\.map$/.test(data.path)) {
							if (!fs.existsSync(path.dirname(data.path))) {
								fs.mkdirSync(path.dirname(data.path));
							}
							fs.writeFileSync(data.path, data.contents);
						}
					}
					this.emit('data', data);
				}));
		});

		es.merge(sequence(webpackStreams), patchFilesStream)
			// .pipe(es.through(function (data) {
			// 	// debug
			// 	console.log('out', data.path, data.contents.length);
			// 	this.emit('data', data);
			// }))
			.pipe(result);

	}).catch(err => {
		console.error(extensionPath);
		console.error(packagedDependencies);
		result.emit('error', err);
	});

	return result.pipe(createStatsStream(path.basename(extensionPath)));
}

function fromLocalNormal(extensionPath: string): Stream {
export function fromLocal(extensionPath: string): Stream {
	const result = es.through();

	vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
@@ -236,7 +37,13 @@ function fromLocalNormal(extensionPath: string): Stream {
		})
		.catch(err => result.emit('error', err));

	return result.pipe(createStatsStream(path.basename(extensionPath)));
	return result;
}

function error(err: any): Stream {
	const result = es.through();
	setTimeout(() => result.emit('error', err));
	return result;
}

const baseHeaders = {
@@ -245,141 +52,82 @@ const baseHeaders = {
	'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
};

export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
	const [publisher, name] = extensionName.split('.');
	const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;

	util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
export function fromMarketplace(extensionName: string, version: string): Stream {
	const filterType = 7;
	const value = extensionName;
	const criterium = { filterType, value };
	const criteria = [criterium];
	const pageNumber = 1;
	const pageSize = 1;
	const sortBy = 0;
	const sortOrder = 0;
	const flags = 0x1 | 0x2 | 0x80;
	const assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
	const filters = [{ criteria, pageNumber, pageSize, sortBy, sortOrder }];
	const body = JSON.stringify({ filters, assetTypes, flags });
	const headers: any = assign({}, baseHeaders, {
		'Content-Type': 'application/json',
		'Accept': 'application/json;api-version=3.0-preview.1',
		'Content-Length': body.length
	});

	const options = {
		base: url,
		base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
		requestOptions: {
			method: 'POST',
			gzip: true,
			headers: baseHeaders
			headers,
			body: body
		}
	};

	const packageJsonFilter = filter('package.json', { restore: true });
	return remote('/extensionquery', options)
		.pipe(flatmap((stream, f) => {
			const rawResult = f.contents.toString('utf8');
			const result = JSON.parse(rawResult);
			const extension = result.results[0].extensions[0];
			if (!extension) {
				return error(`No such extension: ${extension}`);
			}

	return remote('', options)
		.pipe(vzip.src())
		.pipe(filter('extension/**'))
		.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, '')))
		.pipe(packageJsonFilter)
		.pipe(buffer())
		.pipe(json({ __metadata: metadata }))
		.pipe(packageJsonFilter.restore);
}

interface IPackageExtensionsOptions {
	/**
	 * Set to undefined to package all of them.
	 */
	desiredExtensions?: string[];
	sourceMappingURLBase?: string;
}

const excludedExtensions = [
	'vscode-api-tests',
	'vscode-colorize-tests',
	'ms-vscode.node-debug',
	'ms-vscode.node-debug2',
	// {{SQL CARBON EDIT}}
	'integration-tests'
];

// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
	// Add SQL built-in extensions here.
	// the extension will be excluded from SQLOps package and will have separate vsix packages
	'agent',
	'import',
	'profiler',
	'admin-pack',
	'big-data-cluster',
	'dacpac'
];
var azureExtensions = ['azurecore', 'mssql'];
// {{SQL CARBON EDIT}} - End

interface IBuiltInExtension {
	name: string;
	version: string;
	repo: string;
	metadata: any;
}

const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');

/**
 * We're doing way too much stuff at once, with webpack et al. So much stuff
 * that while downloading extensions from the marketplace, node js doesn't get enough
 * stack frames to complete the download in under 2 minutes, at which point the
 * marketplace server cuts off the http request. So, we sequentialize the extension tasks.
 */
function sequence(streamProviders: { (): Stream }[]): Stream {
	const result = es.through();

	function pop() {
		if (streamProviders.length === 0) {
			result.emit('end');
		} else {
			const fn = streamProviders.shift()!;
			fn()
				.on('end', function () { setTimeout(pop, 0); })
				.pipe(result, { end: false });
		}
	}

	pop();
	return result;
}
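A minimal usage sketch of sequence (the extension paths and destination are only examples): each provider is invoked only after the previous stream has emitted 'end', so nothing runs concurrently.

	// Hypothetical illustration, not part of the build:
	const combined = sequence([
		() => fromLocal('extensions/agent'),     // runs first
		() => fromLocal('extensions/profiler')   // starts only once the first stream ends
	]);
	combined.pipe(vfs.dest('out-extensions'));   // 'out-extensions' is a made-up target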

export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
	const opts = optsIn || {};

	const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
		.map(manifestPath => {
			const extensionPath = path.dirname(path.join(root, manifestPath));
			const extensionName = path.basename(extensionPath);
			return { name: extensionName, path: extensionPath };
		})
		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
		.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
		// {{SQL CARBON EDIT}}
		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
		.filter(({ name }) => azureExtensions.indexOf(name) === -1);

	const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
		return fromLocal(extension.path, opts.sourceMappingURLBase)
			.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
	})]);

	// {{SQL CARBON EDIT}}
	const extensionDepsSrc = [
		..._.flatten(extensionsProductionDependencies.map((d: any) => path.relative(root, d.path)).map((d: any) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
	];

	const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
		.pipe(filter(['**', '!**/package-lock.json']))
		.pipe(util2.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
		.pipe(util2.cleanNodeModule('typescript', ['**/**'], undefined));

	// Original code commented out here
	// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });

	// const marketplaceExtensions = () => es.merge(
	// 	...builtInExtensions
	// 		.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
	// 		.map(extension => {
	// 			return fromMarketplace(extension.name, extension.version, extension.metadata)
	// 				.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
	// 		})
	// );

	return sequence([localExtensions, localExtensionDependencies, /*marketplaceExtensions*/])
		.pipe(util2.setExecutableBit(['**/*.sh']))
		.pipe(filter(['**', '!**/*.js.map']));
	// {{SQL CARBON EDIT}} - End
			const metadata = {
				id: extension.extensionId,
				publisherId: extension.publisher,
				publisherDisplayName: extension.publisher.displayName
			};

			const extensionVersion = extension.versions.filter(v => v.version === version)[0];
			if (!extensionVersion) {
				return error(`No such extension version: ${extensionName} @ ${version}`);
			}

			const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
			if (!asset) {
				return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
			}

			util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');

			const options = {
				base: asset.source,
				requestOptions: {
					gzip: true,
					headers: baseHeaders
				}
			};

			return remote('', options)
				.pipe(flatmap(stream => {
					const packageJsonFilter = filter('package.json', { restore: true });

					return stream
						.pipe(vzip.src())
						.pipe(filter('extension/**'))
						.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
						.pipe(packageJsonFilter)
						.pipe(buffer())
						.pipe(json({ __metadata: metadata }))
						.pipe(packageJsonFilter.restore);
				}));
		}));
}

@@ -4,15 +4,15 @@
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const fs = require("fs");
var path = require("path");
var fs = require("fs");
/**
 * Returns the sha1 commit version of a repository or undefined in case of failure.
 */
function getVersion(repo) {
    const git = path.join(repo, '.git');
    const headPath = path.join(git, 'HEAD');
    let head;
    var git = path.join(repo, '.git');
    var headPath = path.join(git, 'HEAD');
    var head;
    try {
        head = fs.readFileSync(headPath, 'utf8').trim();
    }
@@ -22,29 +22,29 @@ function getVersion(repo) {
    if (/^[0-9a-f]{40}$/i.test(head)) {
        return head;
    }
    const refMatch = /^ref: (.*)$/.exec(head);
    var refMatch = /^ref: (.*)$/.exec(head);
    if (!refMatch) {
        return void 0;
    }
    const ref = refMatch[1];
    const refPath = path.join(git, ref);
    var ref = refMatch[1];
    var refPath = path.join(git, ref);
    try {
        return fs.readFileSync(refPath, 'utf8').trim();
    }
    catch (e) {
        // noop
    }
    const packedRefsPath = path.join(git, 'packed-refs');
    let refsRaw;
    var packedRefsPath = path.join(git, 'packed-refs');
    var refsRaw;
    try {
        refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
    }
    catch (e) {
        return void 0;
    }
    const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
    let refsMatch;
    let refs = {};
    var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
    var refsMatch;
    var refs = {};
    while (refsMatch = refsRegex.exec(refsRaw)) {
        refs[refsMatch[2]] = refsMatch[1];
    }

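For orientation, a sketch of the lookup order getVersion implements; the repository contents below are hypothetical.

    // .git/HEAD contains either a bare sha1 or a symbolic ref such as:
    //   ref: refs/heads/main
    // In the symbolic case the sha1 is read from .git/refs/heads/main; if that loose
    // ref file is absent, it is taken from the matching line of .git/packed-refs:
    //   0123456789abcdef0123456789abcdef01234567 refs/heads/main
    var repoRoot = path.join(__dirname, '..', '..'); // hypothetical location
    console.log(getVersion(repoRoot)); // a 40-character sha1, or undefined on failure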
@@ -10,7 +10,7 @@ import * as fs from 'fs';
/**
 * Returns the sha1 commit version of a repository or undefined in case of failure.
 */
export function getVersion(repo: string): string | undefined {
export function getVersion(repo: string): string {
	const git = path.join(repo, '.git');
	const headPath = path.join(git, 'HEAD');
	let head: string;
@@ -50,7 +50,7 @@ export function getVersion(repo: string): string | undefined {
	}

	const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
	let refsMatch: RegExpExecArray | null;
	let refsMatch: RegExpExecArray;
	let refs: { [ref: string]: string } = {};

	while (refsMatch = refsRegex.exec(refsRaw)) {

File diff suppressed because it is too large
@@ -78,6 +78,10 @@
			"name": "vs/workbench/parts/logs",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/parts/navigation",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/parts/output",
			"project": "vscode-workbench"
@@ -110,10 +114,6 @@
			"name": "vs/workbench/parts/snippets",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/parts/stats",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/parts/surveys",
			"project": "vscode-workbench"
@@ -166,10 +166,6 @@
			"name": "vs/workbench/services/bulkEdit",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/services/commands",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/services/configuration",
			"project": "vscode-workbench"
@@ -214,10 +210,6 @@
			"name": "vs/workbench/services/progress",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/services/remote",
			"project": "vscode-workbench"
		},
		{
			"name": "vs/workbench/services/textfile",
			"project": "vscode-workbench"

@@ -7,15 +7,15 @@ import * as path from 'path';
import * as fs from 'fs';

import { through, readable, ThroughStream } from 'event-stream';
import * as File from 'vinyl';
import File = require('vinyl');
import * as Is from 'is';
import * as xml2js from 'xml2js';
import * as glob from 'glob';
import * as https from 'https';
import * as gulp from 'gulp';

import * as util from 'gulp-util';
import * as iconv from 'iconv-lite';
var util = require('gulp-util');
var iconv = require('iconv-lite');

const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;

@@ -57,7 +57,7 @@ export const extraLanguages: Language[] = [
];

// non built-in extensions also that are transifex and need to be part of the language packs
export const externalExtensionsWithTranslations = {
const externalExtensionsWithTranslations = {
	'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
	'vscode-node-debug': 'ms-vscode.node-debug',
	'vscode-node-debug2': 'ms-vscode.node-debug2'
@@ -71,7 +71,7 @@ interface Map<V> {
interface Item {
	id: string;
	message: string;
	comment?: string;
	comment: string;
}

export interface Resource {
@@ -137,6 +137,27 @@ module PackageJsonFormat {
	}
}

interface ModuleJsonFormat {
	messages: string[];
	keys: (string | LocalizeInfo)[];
}

module ModuleJsonFormat {
	export function is(value: any): value is ModuleJsonFormat {
		let candidate = value as ModuleJsonFormat;
		return Is.defined(candidate)
			&& Is.array(candidate.messages) && candidate.messages.every(message => Is.string(message))
			&& Is.array(candidate.keys) && candidate.keys.every(key => Is.string(key) || LocalizeInfo.is(key));
	}
}

interface BundledExtensionHeaderFormat {
	id: string;
	type: string;
	hash: string;
	outDir: string;
}

interface BundledExtensionFormat {
	[key: string]: {
		messages: string[];
@@ -147,7 +168,7 @@ interface BundledExtensionFormat {
export class Line {
	private buffer: string[] = [];

	constructor(indent: number = 0) {
	constructor(private indent: number = 0) {
		if (indent > 0) {
			this.buffer.push(new Array(indent + 1).join(' '));
		}
@@ -214,8 +235,8 @@ export class XLF {
		let existingKeys = new Set<string>();
		for (let i = 0; i < keys.length; i++) {
			let key = keys[i];
			let realKey: string | undefined;
			let comment: string | undefined;
			let realKey: string;
			let comment: string;
			if (Is.string(key)) {
				realKey = key;
				comment = undefined;
@@ -265,17 +286,17 @@ export class XLF {
	}

	static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
		return new Promise((resolve) => {
		return new Promise((resolve, reject) => {
			let parser = new xml2js.Parser();
			let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
			parser.parseString(xlfString, function (_err: any, result: any) {
			parser.parseString(xlfString, function (err, result) {
				const fileNodes: any[] = result['xliff']['file'];
				fileNodes.forEach(file => {
					const originalFilePath = file.$.original;
					const messages: Map<string> = {};
					const transUnits = file.body[0]['trans-unit'];
					if (transUnits) {
						transUnits.forEach((unit: any) => {
						transUnits.forEach(unit => {
							const key = unit.$.id;
							const val = pseudify(unit.source[0]['_'].toString());
							if (key && val) {
@@ -296,7 +317,7 @@ export class XLF {

			let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];

			parser.parseString(xlfString, function (err: any, result: any) {
			parser.parseString(xlfString, function (err, result) {
				if (err) {
					reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
				}
@@ -319,20 +340,17 @@ export class XLF {

					const transUnits = file.body[0]['trans-unit'];
					if (transUnits) {
						transUnits.forEach((unit: any) => {
						transUnits.forEach(unit => {
							const key = unit.$.id;
							if (!unit.target) {
								return; // No translation available
							}

							let val = unit.target[0];
							if (typeof val !== 'string') {
								val = val._;
							}
							const val = unit.target.toString();
							if (key && val) {
								messages[key] = decodeEntities(val);
							} else {
								reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
								reject(new Error(`XLF parsing error: XLIFF file does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
							}
						});
						files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
@@ -351,7 +369,7 @@ export interface ITask<T> {

interface ILimitedTaskFactory<T> {
	factory: ITask<Promise<T>>;
	c: (value?: T | Promise<T>) => void;
	c: (value?: T | Thenable<T>) => void;
	e: (error?: any) => void;
}

@@ -373,7 +391,7 @@ export class Limiter<T> {

	private consume(): void {
		while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
			const iLimitedTask = this.outstandingPromises.shift()!;
			const iLimitedTask = this.outstandingPromises.shift();
			this.runningPromises++;

			const promise = iLimitedTask.factory();
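For illustration, a minimal, hypothetical use of Limiter (urls and download() are made up): queue() holds each factory until one of the NUMBER_OF_CONCURRENT_DOWNLOADS slots frees up.

	const limiter = new Limiter<string>(NUMBER_OF_CONCURRENT_DOWNLOADS);
	const pending: Promise<string[]> = Promise.all(
		urls.map(url => limiter.queue(() => download(url))) // at most 4 requests in flight
	);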
@@ -401,8 +419,8 @@ function stripComments(content: string): string {
 * Third matches block comments
 * Fourth matches line comments
 */
	const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
	let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
	var regexp: RegExp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
	let result = content.replace(regexp, (match, m1, m2, m3, m4) => {
		// Only one of m1, m2, m3, m4 matches
		if (m3) {
			// A block comment. Replace with nothing
@@ -424,9 +442,9 @@ function stripComments(content: string): string {
}
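A hedged illustration of what stripComments is for (the input string is made up, and calling the function directly is only a sketch): it turns commented i18n JSON into something JSON.parse accepts.

	const raw = '{\n\t// translator note\n\t"key": "value" /* stripped too */\n}';
	const parsed = JSON.parse(stripComments(raw)); // → { key: 'value' }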

function escapeCharacters(value: string): string {
	const result: string[] = [];
	for (let i = 0; i < value.length; i++) {
		const ch = value.charAt(i);
	var result: string[] = [];
	for (var i = 0; i < value.length; i++) {
		var ch = value.charAt(i);
		switch (ch) {
			case '\'':
				result.push('\\\'');
@@ -466,6 +484,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json

	let statistics: Map<number> = Object.create(null);

	let total: number = 0;
	let defaultMessages: Map<Map<string>> = Object.create(null);
	let modules = Object.keys(keysSection);
	modules.forEach((module) => {
@@ -478,6 +497,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
		let messageMap: Map<string> = Object.create(null);
		defaultMessages[module] = messageMap;
		keys.map((key, i) => {
			total++;
			if (typeof key === 'string') {
				messageMap[key] = messages[i];
			} else {
@@ -500,7 +520,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
		modules.forEach((module) => {
			let order = keysSection[module];
			let i18nFile = path.join(cwd, module) + '.i18n.json';
			let messages: Map<string> | null = null;
			let messages: Map<string> = null;
			if (fs.existsSync(i18nFile)) {
				let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
				messages = JSON.parse(content);
@@ -513,13 +533,13 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
			}
			let localizedMessages: string[] = [];
			order.forEach((keyInfo) => {
				let key: string | null = null;
				let key: string = null;
				if (typeof keyInfo === 'string') {
					key = keyInfo;
				} else {
					key = keyInfo.key;
				}
				let message: string = messages![key];
				let message: string = messages[key];
				if (!message) {
					if (process.env['VSCODE_BUILD_VERBOSE']) {
						log(`No localized message found for key ${key} in module ${module}. Using default message.`);
@@ -804,8 +824,8 @@ export function createXlfFilesForIsl(): ThroughStream {
}

export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
	let tryGetPromises: Array<Promise<boolean>> = [];
	let updateCreatePromises: Array<Promise<boolean>> = [];
	let tryGetPromises = [];
	let updateCreatePromises = [];

	return through(function (this: ThroughStream, file: File) {
		const project = path.dirname(file.relative);
@@ -870,7 +890,7 @@ function getAllResources(project: string, apiHostname: string, username: string,

export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
	let resourcesByProject: Map<string[]> = Object.create(null);
	resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone
	resourcesByProject[extensionsProject] = [].concat(externalExtensionsWithTranslations); // clone

	return through(function (this: ThroughStream, file: File) {
		const project = path.dirname(file.relative);
@@ -887,7 +907,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas

		const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
		let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
		let extractedResources: string[] = [];
		let extractedResources = [];
		for (let project of [workbenchProject, editorProject]) {
			for (let resource of resourcesByProject[project]) {
				if (resource !== 'setup_messages') {
@@ -900,7 +920,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
			console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
		}

		let promises: Array<Promise<void>> = [];
		let promises = [];
		for (let project in resourcesByProject) {
			promises.push(
				getAllResources(project, apiHostname, username, password).then(resources => {
@@ -945,7 +965,7 @@ function tryGetResource(project: string, slug: string, apiHostname: string, cred
}

function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
	return new Promise((_resolve, reject) => {
	return new Promise((resolve, reject) => {
		const data = JSON.stringify({
			'content': xlfFile.contents.toString(),
			'name': slug,
@@ -1036,8 +1056,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str

	// extensions
	let extensionsToLocalize = Object.create(null);
	glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
	glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
	glob.sync('./extensions/**/*.nls.json', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
	glob.sync('./extensions/*/node_modules/vscode-nls', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);

	Object.keys(extensionsToLocalize).forEach(extension => {
		_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1065,7 +1085,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
	let expectedTranslationsCount = resources.length;
	let translationsRetrieved = 0, called = false;

	return readable(function (_count: any, callback: any) {
	return readable(function (count, callback) {
		// Mark end of stream when all resources were retrieved
		if (translationsRetrieved === expectedTranslationsCount) {
			return this.emit('end');
@@ -1075,7 +1095,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
		called = true;
		const stream = this;
		resources.map(function (resource) {
			retrieveResource(language, resource, apiHostname, credentials).then((file: File | null) => {
			retrieveResource(language, resource, apiHostname, credentials).then((file: File) => {
				if (file) {
					stream.emit('data', file);
				}
@@ -1087,10 +1107,10 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
			callback();
		});
}
const limiter = new Limiter<File | null>(NUMBER_OF_CONCURRENT_DOWNLOADS);
const limiter = new Limiter<File>(NUMBER_OF_CONCURRENT_DOWNLOADS);

function retrieveResource(language: Language, resource: Resource, apiHostname: string, credentials: string): Promise<File | null> {
	return limiter.queue(() => new Promise<File | null>((resolve, reject) => {
function retrieveResource(language: Language, resource: Resource, apiHostname, credentials): Promise<File> {
	return limiter.queue(() => new Promise<File>((resolve, reject) => {
		const slug = resource.name.replace(/\//g, '_');
		const project = resource.project;
		let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id;
@@ -1192,10 +1212,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
	let parsePromises: Promise<ParsedXLF[]>[] = [];
	let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
	let extensionsPacks: Map<I18nPack> = {};
	let errors: any[] = [];
	return through(function (this: ThroughStream, xlf: File) {
		let project = path.dirname(xlf.relative);
		let resource = path.basename(xlf.relative, '.xlf');
		let stream = this;
		let project = path.dirname(xlf.path);
		let resource = path.basename(xlf.path, '.xlf');
		let contents = xlf.contents.toString();
		let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
		parsePromises.push(parsePromise);
@@ -1222,15 +1242,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
			}
		});
	}
	).catch(reason => {
		errors.push(reason);
	});
	);
	}, function () {
		Promise.all(parsePromises)
			.then(() => {
				if (errors.length > 0) {
					throw errors;
				}
				const translatedMainFile = createI18nFile('./main', mainPack);
				resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });

@@ -1249,9 +1264,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
				}
				this.queue(null);
			})
			.catch((reason) => {
				this.emit('error', reason);
			});
			.catch(reason => { throw new Error(reason); });
	});
}

@@ -1272,15 +1285,11 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
			stream.queue(translatedFile);
		});
	}
	).catch(reason => {
		this.emit('error', reason);
	});
	);
	}, function () {
		Promise.all(parsePromises)
			.then(() => { this.queue(null); })
			.catch(reason => {
				this.emit('error', reason);
			});
			.catch(reason => { throw new Error(reason); });
	});
}

@@ -1297,7 +1306,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
		let firstChar = line.charAt(0);
		if (firstChar === '[' || firstChar === ';') {
			if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
				content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo!.name} messages ***`);
				content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo.name} messages ***`);
			} else {
				content.push(line);
			}
@@ -1307,9 +1316,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
			let translated = line;
			if (key) {
				if (key === 'LanguageName') {
					translated = `${key}=${innoSetup.defaultInfo!.name}`;
					translated = `${key}=${innoSetup.defaultInfo.name}`;
				} else if (key === 'LanguageID') {
					translated = `${key}=${innoSetup.defaultInfo!.id}`;
					translated = `${key}=${innoSetup.defaultInfo.id}`;
				} else if (key === 'LanguageCodePage') {
					translated = `${key}=${innoSetup.codePage.substr(2)}`;
				} else {
@@ -1330,14 +1339,14 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language

	return new File({
		path: filePath,
		contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
		contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8'), innoSetup.codePage)
	});
}

function encodeEntities(value: string): string {
	let result: string[] = [];
	for (let i = 0; i < value.length; i++) {
		let ch = value[i];
	var result: string[] = [];
	for (var i = 0; i < value.length; i++) {
		var ch = value[i];
		switch (ch) {
			case '<':
				result.push('&lt;');

build/lib/nls.js
@@ -3,12 +3,13 @@
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
const ts = require("typescript");
const lazy = require("lazy.js");
const event_stream_1 = require("event-stream");
const File = require("vinyl");
const sm = require("source-map");
const path = require("path");
var ts = require("typescript");
var lazy = require("lazy.js");
var event_stream_1 = require("event-stream");
var File = require("vinyl");
var sm = require("source-map");
var assign = require("object-assign");
var path = require("path");
var CollectStepResult;
(function (CollectStepResult) {
    CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
@@ -17,9 +18,9 @@ var CollectStepResult;
    CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
})(CollectStepResult || (CollectStepResult = {}));
function collect(node, fn) {
    const result = [];
    var result = [];
    function loop(node) {
        const stepResult = fn(node);
        var stepResult = fn(node);
        if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
            result.push(node);
        }
@@ -31,45 +32,43 @@ function collect(node, fn) {
    return result;
}
function clone(object) {
    const result = {};
    for (const id in object) {
    var result = {};
    for (var id in object) {
        result[id] = object[id];
    }
    return result;
}
function template(lines) {
    let indent = '', wrap = '';
    var indent = '', wrap = '';
    if (lines.length > 1) {
        indent = '\t';
        wrap = '\n';
    }
    return `/*---------------------------------------------------------
 * Copyright (C) Microsoft Corporation. All rights reserved.
 *--------------------------------------------------------*/
define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
    return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
}
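For illustration only (output reconstructed by hand, whitespace approximate): with two lines, template produces an AMD module like the comment below.

    // template(['"Hello"', '"World"']) →
    // /*---------------------------------------------------------
    //  * Copyright (C) Microsoft Corporation. All rights reserved.
    //  *--------------------------------------------------------*/
    // define([], [
    //     "Hello",
    //     "World"
    // ]);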
/**
 * Returns a stream containing the patched JavaScript and source maps.
 */
function nls() {
    const input = event_stream_1.through();
    const output = input.pipe(event_stream_1.through(function (f) {
    var input = event_stream_1.through();
    var output = input.pipe(event_stream_1.through(function (f) {
        var _this = this;
        if (!f.sourceMap) {
            return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
            return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
        }
        let source = f.sourceMap.sources[0];
        var source = f.sourceMap.sources[0];
        if (!source) {
            return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`));
            return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
        }
        const root = f.sourceMap.sourceRoot;
        var root = f.sourceMap.sourceRoot;
        if (root) {
            source = path.join(root, source);
        }
        const typescript = f.sourceMap.sourcesContent[0];
        var typescript = f.sourceMap.sourcesContent[0];
        if (!typescript) {
            return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
            return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
        }
        nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
        nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
    }));
    return event_stream_1.duplex(input, output);
}
@@ -77,7 +76,8 @@ function isImportNode(node) {
    return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
}
(function (nls_1) {
    function fileFrom(file, contents, path = file.path) {
    function fileFrom(file, contents, path) {
        if (path === void 0) { path = file.path; }
        return new File({
            contents: Buffer.from(contents),
            base: file.base,
@@ -87,27 +87,29 @@ function isImportNode(node) {
    }
    nls_1.fileFrom = fileFrom;
    function mappedPositionFrom(source, lc) {
        return { source, line: lc.line + 1, column: lc.character };
        return { source: source, line: lc.line + 1, column: lc.character };
    }
    nls_1.mappedPositionFrom = mappedPositionFrom;
    function lcFrom(position) {
        return { line: position.line - 1, character: position.column };
    }
    nls_1.lcFrom = lcFrom;
    class SingleFileServiceHost {
        constructor(options, filename, contents) {
    var SingleFileServiceHost = /** @class */ (function () {
        function SingleFileServiceHost(options, filename, contents) {
            var _this = this;
            this.options = options;
            this.filename = filename;
            this.getCompilationSettings = () => this.options;
            this.getScriptFileNames = () => [this.filename];
            this.getScriptVersion = () => '1';
            this.getScriptSnapshot = (name) => name === this.filename ? this.file : this.lib;
            this.getCurrentDirectory = () => '';
            this.getDefaultLibFileName = () => 'lib.d.ts';
            this.getCompilationSettings = function () { return _this.options; };
            this.getScriptFileNames = function () { return [_this.filename]; };
            this.getScriptVersion = function () { return '1'; };
            this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
            this.getCurrentDirectory = function () { return ''; };
            this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
            this.file = ts.ScriptSnapshot.fromString(contents);
            this.lib = ts.ScriptSnapshot.fromString('');
        }
    }
        return SingleFileServiceHost;
    }());
    nls_1.SingleFileServiceHost = SingleFileServiceHost;
    function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
        if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
@@ -115,96 +117,97 @@ function isImportNode(node) {
        }
        return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
    }
    function analyze(contents, options = {}) {
        const filename = 'file.ts';
        const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
        const service = ts.createLanguageService(serviceHost);
        const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
    function analyze(contents, options) {
        if (options === void 0) { options = {}; }
        var filename = 'file.ts';
        var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
        var service = ts.createLanguageService(serviceHost);
        var sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
        // all imports
        const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
        var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
        // import nls = require('vs/nls');
        const importEqualsDeclarations = imports
            .filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration)
            .map(n => n)
            .filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference)
            .filter(d => d.moduleReference.expression.getText() === '\'vs/nls\'');
        var importEqualsDeclarations = imports
            .filter(function (n) { return n.kind === ts.SyntaxKind.ImportEqualsDeclaration; })
            .map(function (n) { return n; })
            .filter(function (d) { return d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference; })
            .filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
        // import ... from 'vs/nls';
        const importDeclarations = imports
            .filter(n => n.kind === ts.SyntaxKind.ImportDeclaration)
            .map(n => n)
            .filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral)
            .filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'')
            .filter(d => !!d.importClause && !!d.importClause.namedBindings);
        const nlsExpressions = importEqualsDeclarations
            .map(d => d.moduleReference.expression)
            .concat(importDeclarations.map(d => d.moduleSpecifier))
            .map(d => ({
        var importDeclarations = imports
            .filter(function (n) { return n.kind === ts.SyntaxKind.ImportDeclaration; })
            .map(function (n) { return n; })
            .filter(function (d) { return d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral; })
            .filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
            .filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
        var nlsExpressions = importEqualsDeclarations
            .map(function (d) { return d.moduleReference.expression; })
            .concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
            .map(function (d) { return ({
            start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
            end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
        }));
        }); });
        // `nls.localize(...)` calls
        const nlsLocalizeCallExpressions = importDeclarations
            .filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport))
            .map(d => d.importClause.namedBindings.name)
            .concat(importEqualsDeclarations.map(d => d.name))
        var nlsLocalizeCallExpressions = importDeclarations
            .filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
            .map(function (d) { return d.importClause.namedBindings.name; })
            .concat(importEqualsDeclarations.map(function (d) { return d.name; }))
            // find read-only references to `nls`
            .map(n => service.getReferencesAtPosition(filename, n.pos + 1))
            .map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
            .flatten()
            .filter(r => !r.isWriteAccess)
            .filter(function (r) { return !r.isWriteAccess; })
            // find the deepest call expressions AST nodes that contain those references
            .map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
            .map(a => lazy(a).last())
            .filter(n => !!n)
            .map(n => n)
            .map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
            .map(function (a) { return lazy(a).last(); })
            .filter(function (n) { return !!n; })
            .map(function (n) { return n; })
            // only `localize` calls
            .filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize');
            .filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
        // `localize` named imports
        const allLocalizeImportDeclarations = importDeclarations
            .filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports))
            .map(d => [].concat(d.importClause.namedBindings.elements))
        var allLocalizeImportDeclarations = importDeclarations
            .filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports; })
            .map(function (d) { return [].concat(d.importClause.namedBindings.elements); })
            .flatten();
        // `localize` read-only references
        const localizeReferences = allLocalizeImportDeclarations
            .filter(d => d.name.getText() === 'localize')
            .map(n => service.getReferencesAtPosition(filename, n.pos + 1))
        var localizeReferences = allLocalizeImportDeclarations
            .filter(function (d) { return d.name.getText() === 'localize'; })
            .map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
            .flatten()
            .filter(r => !r.isWriteAccess);
            .filter(function (r) { return !r.isWriteAccess; });
        // custom named `localize` read-only references
        const namedLocalizeReferences = allLocalizeImportDeclarations
            .filter(d => d.propertyName && d.propertyName.getText() === 'localize')
            .map(n => service.getReferencesAtPosition(filename, n.name.pos + 1))
        var namedLocalizeReferences = allLocalizeImportDeclarations
            .filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
            .map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
            .flatten()
            .filter(r => !r.isWriteAccess);
            .filter(function (r) { return !r.isWriteAccess; });
        // find the deepest call expressions AST nodes that contain those references
        const localizeCallExpressions = localizeReferences
        var localizeCallExpressions = localizeReferences
            .concat(namedLocalizeReferences)
            .map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
            .map(a => lazy(a).last())
            .filter(n => !!n)
            .map(n => n);
            .map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
            .map(function (a) { return lazy(a).last(); })
            .filter(function (n) { return !!n; })
            .map(function (n) { return n; });
        // collect everything
        const localizeCalls = nlsLocalizeCallExpressions
        var localizeCalls = nlsLocalizeCallExpressions
            .concat(localizeCallExpressions)
            .map(e => e.arguments)
            .filter(a => a.length > 1)
            .sort((a, b) => a[0].getStart() - b[0].getStart())
            .map(a => ({
            .map(function (e) { return e.arguments; })
            .filter(function (a) { return a.length > 1; })
            .sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
            .map(function (a) { return ({
            keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
            key: a[0].getText(),
            valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
            value: a[1].getText()
        }));
        }); });
        return {
            localizeCalls: localizeCalls.toArray(),
            nlsExpressions: nlsExpressions.toArray()
        };
    }
    nls_1.analyze = analyze;
    class TextModel {
        constructor(contents) {
            const regex = /\r\n|\r|\n/g;
            let index = 0;
            let match;
    var TextModel = /** @class */ (function () {
        function TextModel(contents) {
            var regex = /\r\n|\r|\n/g;
            var index = 0;
            var match;
            this.lines = [];
            this.lineEndings = [];
            while (match = regex.exec(contents)) {
@@ -217,80 +220,85 @@ function isImportNode(node) {
                this.lineEndings.push('');
            }
        }
        get(index) {
        TextModel.prototype.get = function (index) {
            return this.lines[index];
        }
        set(index, line) {
        };
        TextModel.prototype.set = function (index, line) {
            this.lines[index] = line;
        }
        get lineCount() {
            return this.lines.length;
        }
        };
        Object.defineProperty(TextModel.prototype, "lineCount", {
            get: function () {
                return this.lines.length;
            },
            enumerable: true,
            configurable: true
        });
        /**
         * Applies patch(es) to the model.
         * Multiple patches must be ordered.
         * Does not support patches spanning multiple lines.
         */
        apply(patch) {
            const startLineNumber = patch.span.start.line;
            const endLineNumber = patch.span.end.line;
            const startLine = this.lines[startLineNumber] || '';
            const endLine = this.lines[endLineNumber] || '';
        TextModel.prototype.apply = function (patch) {
            var startLineNumber = patch.span.start.line;
            var endLineNumber = patch.span.end.line;
            var startLine = this.lines[startLineNumber] || '';
            var endLine = this.lines[endLineNumber] || '';
            this.lines[startLineNumber] = [
                startLine.substring(0, patch.span.start.character),
                patch.content,
                endLine.substring(patch.span.end.character)
            ].join('');
            for (let i = startLineNumber + 1; i <= endLineNumber; i++) {
            for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
                this.lines[i] = '';
            }
        }
        toString() {
        };
        TextModel.prototype.toString = function () {
            return lazy(this.lines).zip(this.lineEndings)
                .flatten().toArray().join('');
        }
    }
        };
        return TextModel;
    }());
    nls_1.TextModel = TextModel;
    function patchJavascript(patches, contents, moduleId) {
        const model = new nls.TextModel(contents);
        var model = new nls.TextModel(contents);
        // patch the localize calls
        lazy(patches).reverse().each(p => model.apply(p));
        lazy(patches).reverse().each(function (p) { return model.apply(p); });
        // patch the 'vs/nls' imports
        const firstLine = model.get(0);
        const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
        var firstLine = model.get(0);
        var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
        model.set(0, patchedFirstLine);
        return model.toString();
    }
    nls_1.patchJavascript = patchJavascript;
    function patchSourcemap(patches, rsm, smc) {
        const smg = new sm.SourceMapGenerator({
        var smg = new sm.SourceMapGenerator({
            file: rsm.file,
            sourceRoot: rsm.sourceRoot
        });
        patches = patches.reverse();
        let currentLine = -1;
        let currentLineDiff = 0;
        let source = null;
        smc.eachMapping(m => {
            const patch = patches[patches.length - 1];
            const original = { line: m.originalLine, column: m.originalColumn };
            const generated = { line: m.generatedLine, column: m.generatedColumn };
        var currentLine = -1;
        var currentLineDiff = 0;
        var source = null;
        smc.eachMapping(function (m) {
            var patch = patches[patches.length - 1];
            var original = { line: m.originalLine, column: m.originalColumn };
            var generated = { line: m.generatedLine, column: m.generatedColumn };
            if (currentLine !== generated.line) {
                currentLineDiff = 0;
            }
            currentLine = generated.line;
            generated.column += currentLineDiff;
            if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
                const originalLength = patch.span.end.character - patch.span.start.character;
                const modifiedLength = patch.content.length;
                const lengthDiff = modifiedLength - originalLength;
                var originalLength = patch.span.end.character - patch.span.start.character;
                var modifiedLength = patch.content.length;
                var lengthDiff = modifiedLength - originalLength;
                currentLineDiff += lengthDiff;
                generated.column += lengthDiff;
                patches.pop();
            }
            source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
            source = source.replace(/\\/g, '/');
            smg.addMapping({ source, name: m.name, original, generated });
            smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
        }, null, sm.SourceMapConsumer.GENERATED_ORDER);
        if (source) {
            smg.setSourceContent(source, smc.sourceContentFor(source));
@@ -299,47 +307,47 @@ function isImportNode(node) {
    }
    nls_1.patchSourcemap = patchSourcemap;
    function patch(moduleId, typescript, javascript, sourcemap) {
        const { localizeCalls, nlsExpressions } = analyze(typescript);
        var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
        if (localizeCalls.length === 0) {
            return { javascript, sourcemap };
            return { javascript: javascript, sourcemap: sourcemap };
        }
        const nlsKeys = template(localizeCalls.map(lc => lc.key));
        const nls = template(localizeCalls.map(lc => lc.value));
        const smc = new sm.SourceMapConsumer(sourcemap);
        const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
        let i = 0;
        var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
        var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
        var smc = new sm.SourceMapConsumer(sourcemap);
        var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
        var i = 0;
        // build patches
        const patches = lazy(localizeCalls)
|
||||
.map(lc => ([
|
||||
var patches = lazy(localizeCalls)
|
||||
.map(function (lc) { return ([
|
||||
{ range: lc.keySpan, content: '' + (i++) },
|
||||
{ range: lc.valueSpan, content: 'null' }
|
||||
]))
|
||||
]); })
|
||||
.flatten()
|
||||
.map(c => {
|
||||
const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
|
||||
const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
|
||||
return { span: { start, end }, content: c.content };
|
||||
.map(function (c) {
|
||||
var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
|
||||
var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
|
||||
return { span: { start: start, end: end }, content: c.content };
|
||||
})
|
||||
.toArray();
|
||||
javascript = patchJavascript(patches, javascript, moduleId);
|
||||
// since imports are not within the sourcemap information,
|
||||
// we must do this MacGyver style
|
||||
if (nlsExpressions.length) {
|
||||
javascript = javascript.replace(/^define\(.*$/m, line => {
|
||||
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
|
||||
javascript = javascript.replace(/^define\(.*$/m, function (line) {
|
||||
return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
|
||||
});
|
||||
}
|
||||
sourcemap = patchSourcemap(patches, sourcemap, smc);
|
||||
return { javascript, sourcemap, nlsKeys, nls };
|
||||
return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
|
||||
}
|
||||
nls_1.patch = patch;
|
||||
function patchFiles(javascriptFile, typescript) {
|
||||
// hack?
|
||||
const moduleId = javascriptFile.relative
|
||||
var moduleId = javascriptFile.relative
|
||||
.replace(/\.js$/, '')
|
||||
.replace(/\\/g, '/');
|
||||
const { javascript, sourcemap, nlsKeys, nls } = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap);
|
||||
const result = [fileFrom(javascriptFile, javascript)];
|
||||
var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
|
||||
var result = [fileFrom(javascriptFile, javascript)];
|
||||
result[0].sourceMap = sourcemap;
|
||||
if (nlsKeys) {
|
||||
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
|
||||
|
||||
@@ -6,9 +6,10 @@
import * as ts from 'typescript';
import * as lazy from 'lazy.js';
import { duplex, through } from 'event-stream';
import * as File from 'vinyl';
import File = require('vinyl');
import * as sm from 'source-map';
import * as path from 'path';
import assign = require('object-assign');
import path = require('path');

declare class FileSourceMap extends File {
public sourceMap: sm.RawSourceMap;
@@ -25,7 +26,7 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
const result: ts.Node[] = [];

function loop(node: ts.Node) {
const stepResult = fn(node);
var stepResult = fn(node);

if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node);
@@ -41,8 +42,8 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
}

function clone<T>(object: T): T {
const result = <T>{};
for (const id in object) {
var result = <T>{};
for (var id in object) {
result[id] = object[id];
}
return result;
@@ -66,8 +67,8 @@ define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls(): NodeJS.ReadWriteStream {
const input = through();
const output = input.pipe(through(function (f: FileSourceMap) {
var input = through();
var output = input.pipe(through(function (f: FileSourceMap) {
if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
}
@@ -82,7 +83,7 @@ function nls(): NodeJS.ReadWriteStream {
source = path.join(root, source);
}

const typescript = f.sourceMap.sourcesContent![0];
const typescript = f.sourceMap.sourcesContent[0];
if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
}
@@ -173,7 +174,7 @@ module nls {

export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
const filename = 'file.ts';
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
const serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const service = ts.createLanguageService(serviceHost);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);

@@ -205,8 +206,8 @@ module nls {

// `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport))
.map(d => (<ts.NamespaceImport>d.importClause!.namedBindings).name)
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)
.map(d => (<ts.NamespaceImport>d.importClause.namedBindings).name)
.concat(importEqualsDeclarations.map(d => d.name))

// find read-only references to `nls`
@@ -225,8 +226,8 @@ module nls {

// `localize` named imports
const allLocalizeImportDeclarations = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports))
.map(d => ([] as any[]).concat((<ts.NamedImports>d.importClause!.namedBindings!).elements))
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)
.map(d => [].concat((<ts.NamedImports>d.importClause.namedBindings).elements))
.flatten();

// `localize` read-only references
@@ -278,7 +279,7 @@ module nls {
constructor(contents: string) {
const regex = /\r\n|\r|\n/g;
let index = 0;
let match: RegExpExecArray | null;
let match: RegExpExecArray;

this.lines = [];
this.lineEndings = [];
@@ -359,7 +360,7 @@ module nls {
patches = patches.reverse();
let currentLine = -1;
let currentLineDiff = 0;
let source: string | null = null;
let source = null;

smc.eachMapping(m => {
const patch = patches[patches.length - 1];

@@ -4,31 +4,29 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const gulp = require("gulp");
const concat = require("gulp-concat");
const minifyCSS = require("gulp-cssnano");
const filter = require("gulp-filter");
const flatmap = require("gulp-flatmap");
const sourcemaps = require("gulp-sourcemaps");
const uglify = require("gulp-uglify");
const composer = require("gulp-uglify/composer");
const gulpUtil = require("gulp-util");
const path = require("path");
const pump = require("pump");
const uglifyes = require("uglify-es");
const VinylFile = require("vinyl");
const bundle = require("./bundle");
const i18n_1 = require("./i18n");
const stats_1 = require("./stats");
const util = require("./util");
const REPO_ROOT_PATH = path.join(__dirname, '../..');
var path = require("path");
var gulp = require("gulp");
var sourcemaps = require("gulp-sourcemaps");
var filter = require("gulp-filter");
var minifyCSS = require("gulp-cssnano");
var uglify = require("gulp-uglify");
var composer = require("gulp-uglify/composer");
var uglifyes = require("uglify-es");
var es = require("event-stream");
var concat = require("gulp-concat");
var VinylFile = require("vinyl");
var bundle = require("./bundle");
var util = require("./util");
var gulpUtil = require("gulp-util");
var flatmap = require("gulp-flatmap");
var pump = require("pump");
var REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix, message) {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
}
// {{SQL CARBON EDIT}}
function loaderConfig(emptyPaths) {
const result = {
var result = {
paths: {
'vs': 'out-build/vs',
'sql': 'out-build/sql',
@@ -40,26 +38,26 @@ function loaderConfig(emptyPaths) {
return result;
}
exports.loaderConfig = loaderConfig;
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(src, bundledFileHeader, bundleLoader) {
let sources = [
`${src}/vs/loader.js`
var sources = [
src + "/vs/loader.js"
];
if (bundleLoader) {
sources = sources.concat([
`${src}/vs/css.js`,
`${src}/vs/nls.js`
src + "/vs/css.js",
src + "/vs/nls.js"
]);
}
let isFirst = true;
var isFirst = true;
return (gulp
.src(sources, { base: `${src}` })
.src(sources, { base: "" + src })
.pipe(es.through(function (data) {
if (isFirst) {
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: undefined,
base: '',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -76,12 +74,12 @@ function loader(src, bundledFileHeader, bundleLoader) {
})));
}
function toConcatStream(src, bundledFileHeader, sources, dest) {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then
// insert a fake source at the beginning of each bundle with our copyright
let containsOurCopyright = false;
for (let i = 0, len = sources.length; i < len; i++) {
const fileContents = sources[i].contents;
var containsOurCopyright = false;
for (var i = 0, len = sources.length; i < len; i++) {
var fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true;
break;
@@ -93,9 +91,9 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
contents: bundledFileHeader
});
}
const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : undefined;
var treatedSources = sources.map(function (source) {
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
var base = source.path ? root + ("/" + src) : '';
return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base,
@@ -104,8 +102,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
});
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest))
.pipe(stats_1.createStatsStream(dest));
.pipe(concat(dest));
}
function toBundleStream(src, bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) {
@@ -113,33 +110,33 @@ function toBundleStream(src, bundledFileHeader, bundles) {
}));
}
function optimizeTask(opts) {
const src = opts.src;
const entryPoints = opts.entryPoints;
const otherSources = opts.otherSources;
const resources = opts.resources;
const loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out;
var src = opts.src;
var entryPoints = opts.entryPoints;
var otherSources = opts.otherSources;
var resources = opts.resources;
var loaderConfig = opts.loaderConfig;
var bundledFileHeader = opts.header;
var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
var out = opts.out;
return function () {
const bundlesStream = es.through(); // this stream will contain the bundled files
const resourcesStream = es.through(); // this stream will contain the resources
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
var bundlesStream = es.through(); // this stream will contain the bundled files
var resourcesStream = es.through(); // this stream will contain the resources
var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) {
if (err) {
return bundlesStream.emit('error', JSON.stringify(err));
}
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources
const filteredResources = resources.slice();
var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource);
}
filteredResources.push('!' + resource);
});
gulp.src(filteredResources, { base: `${src}` }).pipe(resourcesStream);
const bundleInfoArray = [];
gulp.src(filteredResources, { base: "" + src }).pipe(resourcesStream);
var bundleInfoArray = [];
if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json',
@@ -149,9 +146,9 @@ function optimizeTask(opts) {
}
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
});
const otherSourcesStream = es.through();
const otherSourcesStreamArr = [];
gulp.src(otherSources, { base: `${src}` })
var otherSourcesStream = es.through();
var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: "" + src })
.pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
}, function () {
@@ -162,17 +159,13 @@ function optimizeTask(opts) {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
}
}));
const result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
var result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result
.pipe(sourcemaps.write('./', {
sourceRoot: undefined,
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}) : es.through())
.pipe(gulp.dest(out));
};
}
@@ -182,14 +175,14 @@ exports.optimizeTask = optimizeTask;
* to have a file "context" to include our copyright only once per file.
*/
function uglifyWithCopyrights() {
const preserveComments = (f) => {
return (_node, comment) => {
const text = comment.value;
const type = comment.type;
var preserveComments = function (f) {
return function (node, comment) {
var text = comment.value;
var type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) {
return false;
}
const isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) {
if (f.__hasOurCopyright) {
return false;
@@ -207,10 +200,10 @@ function uglifyWithCopyrights() {
return false;
};
};
const minify = composer(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
var minify = composer(uglifyes);
var input = es.through();
var output = input
.pipe(flatmap(function (stream, f) {
return stream.pipe(minify({
output: {
comments: preserveComments(f),
@@ -221,18 +214,18 @@ function uglifyWithCopyrights() {
return es.duplex(input, output);
}
function minifyTask(src, sourceMapBaseUrl) {
const sourceMappingURL = sourceMapBaseUrl ? ((f) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined;
return cb => {
const jsFilter = filter('**/*.js', { restore: true });
const cssFilter = filter('**/*.css', { restore: true });
var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
return function (cb) {
var jsFilter = filter('**/*.js', { restore: true });
var cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
sourceMappingURL,
sourceRoot: undefined,
sourceMappingURL: sourceMappingURL,
sourceRoot: null,
includeContent: true,
addComment: true
}), gulp.dest(src + '-min'), (err) => {
}), gulp.dest(src + '-min'), function (err) {
if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
}
cb(err);
});

@@ -5,25 +5,24 @@

'use strict';

import * as es from 'event-stream';
import * as path from 'path';
import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano';
import * as filter from 'gulp-filter';
import * as flatmap from 'gulp-flatmap';
import * as sourcemaps from 'gulp-sourcemaps';
import * as filter from 'gulp-filter';
import * as minifyCSS from 'gulp-cssnano';
import * as uglify from 'gulp-uglify';
import * as composer from 'gulp-uglify/composer';
import * as gulpUtil from 'gulp-util';
import * as path from 'path';
import * as pump from 'pump';
import * as sm from 'source-map';
import * as uglifyes from 'uglify-es';
import * as es from 'event-stream';
import * as concat from 'gulp-concat';
import * as VinylFile from 'vinyl';
import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n';
import { createStatsStream } from './stats';
import * as util from './util';
import * as gulpUtil from 'gulp-util';
import * as flatmap from 'gulp-flatmap';
import * as pump from 'pump';
import * as sm from 'source-map';
import { Language } from './i18n';

const REPO_ROOT_PATH = path.join(__dirname, '../..');

@@ -33,7 +32,7 @@ function log(prefix: string, message: string): void {

// {{SQL CARBON EDIT}}
export function loaderConfig(emptyPaths?: string[]) {
const result: any = {
const result = {
paths: {
'vs': 'out-build/vs',
'sql': 'out-build/sql',
@@ -73,7 +72,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
isFirst = false;
this.emit('data', new VinylFile({
path: 'fake',
base: undefined,
base: '',
contents: Buffer.from(bundledFileHeader)
}));
this.emit('data', data);
@@ -113,7 +112,7 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.

const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : undefined;
const base = source.path ? root + `/${src}` : '';

return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
@@ -124,11 +123,10 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.

return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest))
.pipe(createStatsStream(dest));
.pipe(concat(dest));
}

function toBundleStream(src: string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
function toBundleStream(src:string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function (bundle) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
}));
@@ -190,7 +188,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json

bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }

toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);

@@ -239,14 +237,10 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr

return result
.pipe(sourcemaps.write('./', {
sourceRoot: undefined,
sourceRoot: null,
addComment: true,
includeContent: true
}))
.pipe(opts.languages && opts.languages.length ? processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}) : es.through())
.pipe(gulp.dest(out));
};
}
@@ -260,7 +254,7 @@ declare class FileWithCopyright extends VinylFile {
*/
function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
const preserveComments = (f: FileWithCopyright) => {
return (_node: any, comment: { value: string; type: string; }) => {
return (node, comment: { value: string; type: string; }) => {
const text = comment.value;
const type = comment.type;

@@ -288,7 +282,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
};
};

const minify = (composer as any)(uglifyes);
const minify = composer(uglifyes);
const input = es.through();
const output = input
.pipe(flatmap((stream, f) => {
@@ -304,7 +298,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
}

export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl ? ((f: any) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined;
const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);

return cb => {
const jsFilter = filter('**/*.js', { restore: true });
@@ -321,13 +315,13 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
cssFilter.restore,
sourcemaps.write('./', {
sourceMappingURL,
sourceRoot: undefined,
sourceRoot: null,
includeContent: true,
addComment: true
} as any),
}),
gulp.dest(src + '-min')
, (err: any) => {
if (err instanceof (uglify as any).GulpUglifyError) {
if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
}

@@ -4,20 +4,20 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const _ = require("underscore");
const util = require("gulp-util");
const fs = require("fs");
const path = require("path");
const allErrors = [];
let startTime = null;
let count = 0;
var es = require("event-stream");
var _ = require("underscore");
var util = require("gulp-util");
var fs = require("fs");
var path = require("path");
var allErrors = [];
var startTime = null;
var count = 0;
function onStart() {
if (count++ > 0) {
return;
}
startTime = new Date().getTime();
util.log(`Starting ${util.colors.green('compilation')}...`);
util.log("Starting " + util.colors.green('compilation') + "...");
}
function onEnd() {
if (--count > 0) {
@@ -25,7 +25,7 @@ function onEnd() {
}
log();
}
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
var buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
try {
fs.mkdirSync(path.dirname(buildLogPath));
}
@@ -33,52 +33,61 @@ catch (err) {
// ignore
}
function log() {
const errors = _.flatten(allErrors);
const seen = new Set();
errors.map(err => {
var errors = _.flatten(allErrors);
var seen = new Set();
errors.map(function (err) {
if (!seen.has(err)) {
seen.add(err);
util.log(`${util.colors.red('Error')}: ${err}`);
util.log(util.colors.red('Error') + ": " + err);
}
});
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x)
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
var messages = errors
.map(function (err) { return regex.exec(err); })
.filter(function (match) { return !!match; })
.map(function (_a) {
var path = _a[1], line = _a[2], column = _a[3], message = _a[4];
return ({ path: path, line: parseInt(line), column: parseInt(column), message: message });
});
try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
}
catch (err) {
//noop
}
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`);
util.log("Finished " + util.colors.green('compilation') + " with " + errors.length + " errors after " + util.colors.magenta((new Date().getTime() - startTime) + ' ms'));
}
function createReporter() {
const errors = [];
var errors = [];
allErrors.push(errors);
const result = (err) => errors.push(err);
result.hasErrors = () => errors.length > 0;
result.end = (emitError) => {
errors.length = 0;
onStart();
return es.through(undefined, function () {
onEnd();
if (emitError && errors.length > 0) {
if (!errors.__logged__) {
log();
var ReportFunc = /** @class */ (function () {
function ReportFunc(err) {
errors.push(err);
}
ReportFunc.hasErrors = function () {
return errors.length > 0;
};
ReportFunc.end = function (emitError) {
errors.length = 0;
onStart();
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
errors.__logged__ = true;
if (!errors.__logged__) {
log();
}
var err = new Error("Found " + errors.length + " errors");
err.__reporter__ = true;
this.emit('error', err);
}
errors.__logged__ = true;
const err = new Error(`Found ${errors.length} errors`);
err.__reporter__ = true;
this.emit('error', err);
}
else {
this.emit('end');
}
});
};
return result;
else {
this.emit('end');
}
});
};
return ReportFunc;
}());
return ReportFunc;
}
exports.createReporter = createReporter;

@@ -12,7 +12,7 @@ import * as fs from 'fs';
import * as path from 'path';

const allErrors: string[][] = [];
let startTime: number | null = null;
let startTime: number = null;
let count = 0;

function onStart(): void {
@@ -55,7 +55,6 @@ function log(): void {
const messages = errors
.map(err => regex.exec(err))
.filter(match => !!match)
.map(x => x as string[])
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));

try {
@@ -65,7 +64,7 @@ function log(): void {
//noop
}

util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime!) + ' ms')}`);
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`);
}

export interface IReporter {
@@ -78,32 +77,38 @@ export function createReporter(): IReporter {
const errors: string[] = [];
allErrors.push(errors);

const result = (err: string) => errors.push(err);
class ReportFunc {
constructor(err: string) {
errors.push(err);
}

result.hasErrors = () => errors.length > 0;
static hasErrors(): boolean {
return errors.length > 0;
}

result.end = (emitError: boolean): NodeJS.ReadWriteStream => {
errors.length = 0;
onStart();
static end(emitError: boolean): NodeJS.ReadWriteStream {
errors.length = 0;
onStart();

return es.through(undefined, function () {
onEnd();
return es.through(null, function () {
onEnd();

if (emitError && errors.length > 0) {
if (!(errors as any).__logged__) {
log();
if (emitError && errors.length > 0) {
(errors as any).__logged__ = true;

if (!(errors as any).__logged__) {
log();
}

const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
} else {
this.emit('end');
}
});
}
}

(errors as any).__logged__ = true;

const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
} else {
this.emit('end');
}
});
};

return result;
return <IReporter><any>ReportFunc;
}

@@ -5,51 +5,35 @@
'use strict';
var snaps;
(function (snaps) {
const fs = require('fs');
const path = require('path');
const os = require('os');
const cp = require('child_process');
const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`);
const product = require('../../product.json');
const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
var fs = require('fs');
var path = require('path');
var os = require('os');
var cp = require('child_process');
var mksnapshot = path.join(__dirname, "../../node_modules/.bin/" + (process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'));
var product = require('../../product.json');
var arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
//
let loaderFilepath;
let startupBlobFilepath;
var loaderFilepath;
var startupBlobFilepath;
switch (process.platform) {
case 'darwin':
loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`;
loaderFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Resources/app/out/vs/loader.js";
startupBlobFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin";
break;
case 'win32':
case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
break;
default:
throw new Error('Unknown platform');
loaderFilepath = "VSCode-" + process.platform + "-" + arch + "/resources/app/out/vs/loader.js";
startupBlobFilepath = "VSCode-" + process.platform + "-" + arch + "/snapshot_blob.bin";
}
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
snapshotLoader(loaderFilepath, startupBlobFilepath);
function snapshotLoader(loaderFilepath, startupBlobFilepath) {
const inputFile = fs.readFileSync(loaderFilepath);
const wrappedInputFile = `
var Monaco_Loader_Init;
(function() {
var doNotInitLoader = true;
${inputFile.toString()};
Monaco_Loader_Init = function() {
AMDLoader.init();
CSSLoaderPlugin.init();
NLSLoaderPlugin.init();

return { define, require };
}
})();
`;
const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
var inputFile = fs.readFileSync(loaderFilepath);
var wrappedInputFile = "\n\t\tvar Monaco_Loader_Init;\n\t\t(function() {\n\t\t\tvar doNotInitLoader = true;\n\t\t\t" + inputFile.toString() + ";\n\t\t\tMonaco_Loader_Init = function() {\n\t\t\t\tAMDLoader.init();\n\t\t\t\tCSSLoaderPlugin.init();\n\t\t\t\tNLSLoaderPlugin.init();\n\n\t\t\t\treturn { define, require };\n\t\t\t}\n\t\t})();\n\t\t";
var wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
console.log(wrappedInputFilepath);
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, "--startup_blob", startupBlobFilepath]);
}
})(snaps || (snaps = {}));

@@ -30,10 +30,6 @@ namespace snaps {
case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
break;

default:
throw new Error('Unknown platform');
}

loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);

@@ -4,13 +4,14 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const fs = require("fs");
const path = require("path");
const tss = require("./treeshaking");
const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
let dirCache = {};
var ts = require("typescript");
var fs = require("fs");
var path = require("path");
var tss = require("./treeshaking");
var REPO_ROOT = path.join(__dirname, '../../');
var SRC_DIR = path.join(REPO_ROOT, 'src');
var OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
var dirCache = {};
function writeFile(filePath, contents) {
function ensureDirs(dirPath) {
if (dirCache[dirPath]) {
@@ -27,49 +28,32 @@ function writeFile(filePath, contents) {
fs.writeFileSync(filePath, contents);
}
function extractEditor(options) {
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
let compilerOptions;
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
}
else {
compilerOptions = tsConfig.compilerOptions;
}
tsConfig.compilerOptions = compilerOptions;
compilerOptions.noUnusedLocals = false;
compilerOptions.preserveConstEnums = false;
compilerOptions.declaration = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
delete compilerOptions.types;
delete tsConfig.extends;
tsConfig.exclude = [];
options.compilerOptions = compilerOptions;
let result = tss.shake(options);
for (let fileName in result) {
var result = tss.shake(options);
for (var fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied = {};
const copyFile = (fileName) => {
var copied = {};
var copyFile = function (fileName) {
if (copied[fileName]) {
return;
}
copied[fileName] = true;
const srcPath = path.join(options.sourcesRoot, fileName);
const dstPath = path.join(options.destRoot, fileName);
var srcPath = path.join(options.sourcesRoot, fileName);
var dstPath = path.join(options.destRoot, fileName);
writeFile(dstPath, fs.readFileSync(srcPath));
};
const writeOutputFile = (fileName, contents) => {
var writeOutputFile = function (fileName, contents) {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
for (var fileName in result) {
if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName];
const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFileName = info.importedFiles[i].fileName;
let importedFilePath;
var fileContents = result[fileName];
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFileName = info.importedFiles[i].fileName;
var importedFilePath = void 0;
if (/^vs\/css!/.test(importedFileName)) {
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
}
@@ -90,188 +74,215 @@ function extractEditor(options) {
}
}
}
delete tsConfig.compilerOptions.moduleResolution;
var tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
tsConfig.compilerOptions.noUnusedLocals = false;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[
'vs/css.build.js',
'vs/css.d.ts',
'vs/css.js',
'vs/loader.js',
'vs/monaco.d.ts',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/nls.js',
'vs/nls.mock.ts',
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require.d.ts',
].forEach(copyFile);
}
exports.extractEditor = extractEditor;
function createESMSourcesAndResources2(options) {
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder);
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file) => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
function createESMSourcesAndResources(options) {
var OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
var OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
var in_queue = Object.create(null);
var queue = [];
var enqueue = function (module) {
if (in_queue[module]) {
return;
}
if (/\.ts$/.test(dest)) {
return path.join(OUT_FOLDER, dest);
}
return path.join(OUT_RESOURCES_FOLDER, dest);
in_queue[module] = true;
queue.push(module);
};
const allFiles = walkDirRecursive(SRC_FOLDER);
for (let i = 0; i < allFiles.length; i++) {
const file = allFiles[i];
if (options.ignores.indexOf(file.replace(/\\/g, '/')) >= 0) {
continue;
var seenDir = {};
var createDirectoryRecursive = function (dir) {
if (seenDir[dir]) {
return;
}
if (file === 'tsconfig.json') {
const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString());
tsConfig.compilerOptions.module = 'es6';
tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/');
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
if (lastSlash !== -1) {
createDirectoryRecursive(dir.substring(0, lastSlash));
}
if (/\.ts$/.test(file)) {
// Transform the .ts file
let fileContents = fs.readFileSync(path.join(SRC_FOLDER, file)).toString();
const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;
let importedFilepath;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
seenDir[dir] = true;
try {
fs.mkdirSync(dir);
}
catch (err) { }
};
seenDir[REPO_ROOT] = true;
var toggleComments = function (fileContents) {
var lines = fileContents.split(/\r\n|\r|\n/);
var mode = 0;
for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
else {
importedFilepath = importedFilename;
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(file), importedFilepath);
}
let relativePath;
if (importedFilepath === path.dirname(file).replace(/\\/g, '/')) {
relativePath = '../' + path.basename(path.dirname(file));
}
else if (importedFilepath === path.dirname(path.dirname(file)).replace(/\\/g, '/')) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(file)));
}
else {
relativePath = path.relative(path.dirname(file), importedFilepath);
}
relativePath = relativePath.replace(/\\/g, '/');
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
continue;
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
write(getDestAbsoluteFilePath(file), fileContents);
continue;
}
console.log(`UNKNOWN FILE: ${file}`);
}
function walkDirRecursive(dir) {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
function _walkDirRecursive(dir, result, trimPos) {
const files = fs.readdirSync(dir);
for (let i = 0; i < files.length; i++) {
const file = path.join(dir, files[i]);
if (fs.statSync(file).isDirectory()) {
_walkDirRecursive(file, result, trimPos);
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
else {
result.push(file.substr(trimPos));
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
}
function write(absoluteFilePath, contents) {
if (/(\.ts$)|(\.js$)/.test(absoluteFilePath)) {
return lines.join('\n');
};
var write = function (filePath, contents) {
var absoluteFilePath;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
}
else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString());
}
writeFile(absoluteFilePath, contents);
function toggleComments(fileContents) {
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
fs.writeFileSync(absoluteFilePath, contents);
};
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
while (queue.length > 0) {
var module_1 = queue.shift();
if (transportCSS(module_1, enqueue, write)) {
continue;
}
if (transportResource(options, module_1, enqueue, write)) {
continue;
}
if (transportDTS(options, module_1, enqueue, write)) {
continue;
}
var filename = void 0;
if (options.redirects[module_1]) {
filename = path.join(SRC_DIR, options.redirects[module_1] + '.ts');
}
else {
filename = path.join(SRC_DIR, module_1 + '.ts');
}
var fileContents = fs.readFileSync(filename).toString();
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFilename = info.importedFiles[i].fileName;
var pos = info.importedFiles[i].pos;
var end = info.importedFiles[i].end;
var importedFilepath = void 0;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
}
else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module_1), importedFilepath);
}
enqueue(importedFilepath);
var relativePath = void 0;
if (importedFilepath === path.dirname(module_1)) {
relativePath = '../' + path.basename(path.dirname(module_1));
}
else if (importedFilepath === path.dirname(path.dirname(module_1))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module_1)));
}
else {
relativePath = path.relative(path.dirname(module_1), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return "import * as " + m1 + " from " + m2 + ";";
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module_1 + '.ts', fileContents);
}
var esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": []
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
var monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
}
exports.createESMSourcesAndResources2 = createESMSourcesAndResources2;
exports.createESMSourcesAndResources = createESMSourcesAndResources;
function transportCSS(module, enqueue, write) {
if (!/\.css/.test(module)) {
return false;
}
const filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
var filename = path.join(SRC_DIR, module);
var fileContents = fs.readFileSync(filename).toString();
var inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
var inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
var newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents);
return true;
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, (url) => {
let imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
return _replaceURL(contents, function (url) {
var imagePath = path.join(path.dirname(module), url);
var fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64');
var MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
var DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString()
var newText = fileContents.toString()
.replace(/"/g, '\'')
.replace(/</g, '%3C')
.replace(/>/g, '%3E')
.replace(/&/g, '%26')
.replace(/#/g, '%23')
.replace(/\s+/g, ' ');
let encodedData = ',' + newText;
var encodedData = ',' + newText;
if (encodedData.length < DATA.length) {
DATA = encodedData;
}
@@ -284,8 +295,12 @@ function transportCSS(module, enqueue, write) {
}
function _replaceURL(contents, replacer) {
// Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_, ...matches) => {
let url = matches[0];
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, function (_) {
var matches = [];
for (var _i = 1; _i < arguments.length; _i++) {
matches[_i - 1] = arguments[_i];
}
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1);
@@ -308,3 +323,27 @@ function transportCSS(module, enqueue, write) {
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
}
}
function transportResource(options, module, enqueue, write) {
if (!/\.svg/.test(module)) {
return false;
}
write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}
function transportDTS(options, module, enqueue, write) {
if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}
if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}
write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
var filename;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
}
else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}

@@ -10,6 +10,7 @@ import * as tss from './treeshaking';

const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src');
const OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');

let dirCache: { [dir: string]: boolean; } = {};

@@ -31,33 +32,13 @@ function writeFile(filePath: string, contents: Buffer | string): void {
}

export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string }): void {
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
let compilerOptions: { [key: string]: any };
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
} else {
compilerOptions = tsConfig.compilerOptions;
}
tsConfig.compilerOptions = compilerOptions;

compilerOptions.noUnusedLocals = false;
compilerOptions.preserveConstEnums = false;
compilerOptions.declaration = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;

delete compilerOptions.types;
delete tsConfig.extends;
tsConfig.exclude = [];

options.compilerOptions = compilerOptions;

let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]);
}
}
let copied: { [fileName: string]: boolean; } = {};
let copied: { [fileName:string]: boolean; } = {};
const copyFile = (fileName: string) => {
if (copied[fileName]) {
return;
@@ -67,7 +48,7 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
const dstPath = path.join(options.destRoot, fileName);
writeFile(dstPath, fs.readFileSync(srcPath));
};
const writeOutputFile = (fileName: string, contents: string | Buffer) => {
const writeOutputFile = (fileName: string, contents: string) => {
writeFile(path.join(options.destRoot, fileName), contents);
};
for (let fileName in result) {
@@ -99,7 +80,8 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
}
}

delete tsConfig.compilerOptions.moduleResolution;
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
tsConfig.compilerOptions.noUnusedLocals = false;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));

[
@@ -107,179 +89,203 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
'vs/css.d.ts',
'vs/css.js',
'vs/loader.js',
'vs/monaco.d.ts',
'vs/nls.build.js',
'vs/nls.d.ts',
'vs/nls.js',
'vs/nls.mock.ts',
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require.d.ts',
].forEach(copyFile);
}

export interface IOptions2 {
srcFolder: string;
export interface IOptions {
entryPoints: string[];
outFolder: string;
outResourcesFolder: string;
ignores: string[];
renames: { [filename: string]: string; };
redirects: { [module: string]: string; };
}

export function createESMSourcesAndResources2(options: IOptions2): void {
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder);
export function createESMSourcesAndResources(options: IOptions): void {
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);

const getDestAbsoluteFilePath = (file: string): string => {
let dest = options.renames[file.replace(/\\/g, '/')] || file;
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`);
let in_queue: { [module: string]: boolean; } = Object.create(null);
let queue: string[] = [];

const enqueue = (module: string) => {
if (in_queue[module]) {
return;
}
if (/\.ts$/.test(dest)) {
return path.join(OUT_FOLDER, dest);
}
return path.join(OUT_RESOURCES_FOLDER, dest);
in_queue[module] = true;
queue.push(module);
};

const allFiles = walkDirRecursive(SRC_FOLDER);
for (let i = 0; i < allFiles.length; i++) {
const file = allFiles[i];

if (options.ignores.indexOf(file.replace(/\\/g, '/')) >= 0) {
continue;
const seenDir: { [key: string]: boolean; } = {};
const createDirectoryRecursive = (dir: string) => {
if (seenDir[dir]) {
return;
}

if (file === 'tsconfig.json') {
const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString());
tsConfig.compilerOptions.module = 'es6';
tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/');
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
let lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}

if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
if (lastSlash !== -1) {
createDirectoryRecursive(dir.substring(0, lastSlash));
}
seenDir[dir] = true;
try { fs.mkdirSync(dir); } catch (err) { }
};

if (/\.ts$/.test(file)) {
// Transform the .ts file
let fileContents = fs.readFileSync(path.join(SRC_FOLDER, file)).toString();
seenDir[REPO_ROOT] = true;

const info = ts.preProcessFile(fileContents);
const toggleComments = (fileContents: string) => {
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];

for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;

let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(file), importedFilepath);
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}

let relativePath: string;
if (importedFilepath === path.dirname(file).replace(/\\/g, '/')) {
relativePath = '../' + path.basename(path.dirname(file));
} else if (importedFilepath === path.dirname(path.dirname(file)).replace(/\\/g, '/')) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(file)));
} else {
relativePath = path.relative(path.dirname(file), importedFilepath);
}
relativePath = relativePath.replace(/\\/g, '/');
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
continue;
}

fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}

write(getDestAbsoluteFilePath(file), fileContents);
continue;
}

console.log(`UNKNOWN FILE: ${file}`);
}


function walkDirRecursive(dir: string): string[] {
if (dir.charAt(dir.length - 1) !== '/' && dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result: string[] = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}

function _walkDirRecursive(dir: string, result: string[], trimPos: number): void {
const files = fs.readdirSync(dir);
for (let i = 0; i < files.length; i++) {
const file = path.join(dir, files[i]);
if (fs.statSync(file).isDirectory()) {
_walkDirRecursive(file, result, trimPos);
} else {
result.push(file.substr(trimPos));
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
}

function write(absoluteFilePath: string, contents: string | Buffer): void {
if (/(\.ts$)|(\.js$)/.test(absoluteFilePath)) {
return lines.join('\n');
};

const write = (filePath: string, contents: string | Buffer) => {
let absoluteFilePath: string;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
} else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString());
}
writeFile(absoluteFilePath, contents);
fs.writeFileSync(absoluteFilePath, contents);
};

function toggleComments(fileContents: string): string {
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));

if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
while (queue.length > 0) {
const module = queue.shift();
if (transportCSS(module, enqueue, write)) {
continue;
}
if (transportResource(options, module, enqueue, write)) {
continue;
}
if (transportDTS(options, module, enqueue, write)) {
continue;
}

if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
let filename: string;
if (options.redirects[module]) {
filename = path.join(SRC_DIR, options.redirects[module] + '.ts');
} else {
filename = path.join(SRC_DIR, module + '.ts');
}
let fileContents = fs.readFileSync(filename).toString();

if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
const info = ts.preProcessFile(fileContents);

for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;

let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module), importedFilepath);
}

return lines.join('\n');
enqueue(importedFilepath);

let relativePath: string;
if (importedFilepath === path.dirname(module)) {
relativePath = '../' + path.basename(path.dirname(module));
} else if (importedFilepath === path.dirname(path.dirname(module))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module)));
} else {
relativePath = path.relative(path.dirname(module), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
}

fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');

write(module + '.ts', fileContents);
}

const esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": [
]
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));

const monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);

}

function transportCSS(module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
@@ -331,7 +337,7 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
function _replaceURL(contents: string, replacer: (url: string) => string): string {
// Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => {
let url = matches[0];
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1);
@@ -357,3 +363,33 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
}
}

function transportResource(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {

if (!/\.svg/.test(module)) {
return false;
}

write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}

function transportDTS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {

if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}

if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}

write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
let filename: string;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
} else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}

@@ -1,135 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const util = require("gulp-util");
const appInsights = require("applicationinsights");
class Entry {
constructor(name, totalCount, totalSize) {
this.name = name;
this.totalCount = totalCount;
this.totalSize = totalSize;
}
toString(pretty) {
if (!pretty) {
if (this.totalCount === 1) {
return `${this.name}: ${this.totalSize} bytes`;
}
else {
return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`;
}
}
else {
if (this.totalCount === 1) {
return `Stats for '${util.colors.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`;
}
else {
const count = this.totalCount < 100
? util.colors.green(this.totalCount.toString())
: util.colors.red(this.totalCount.toString());
return `Stats for '${util.colors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`;
}
}
}
}
const _entries = new Map();
function createStatsStream(group, log) {
const entry = new Entry(group, 0, 0);
_entries.set(entry.name, entry);
return es.through(function (data) {
const file = data;
if (typeof file.path === 'string') {
entry.totalCount += 1;
if (Buffer.isBuffer(file.contents)) {
entry.totalSize += file.contents.length;
}
else if (file.stat && typeof file.stat.size === 'number') {
entry.totalSize += file.stat.size;
}
else {
// funky file...
}
}
this.emit('data', data);
}, function () {
if (log) {
if (entry.totalCount === 1) {
util.log(`Stats for '${util.colors.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`);
}
else {
const count = entry.totalCount < 100
? util.colors.green(entry.totalCount.toString())
: util.colors.red(entry.totalCount.toString());
util.log(`Stats for '${util.colors.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`);
}
}
this.emit('end');
});
}
exports.createStatsStream = createStatsStream;
function submitAllStats(productJson, commit) {
const sorted = [];
// move entries for single files to the front
_entries.forEach(value => {
if (value.totalCount === 1) {
sorted.unshift(value);
}
else {
sorted.push(value);
}
});
// print to console
for (const entry of sorted) {
console.log(entry.toString(true));
}
// send data as telemetry event when the
// product is configured to send telemetry
if (!productJson || !productJson.aiConfig || typeof productJson.aiConfig.asimovKey !== 'string') {
return Promise.resolve(false);
}
return new Promise(resolve => {
try {
const sizes = {};
const counts = {};
for (const entry of sorted) {
sizes[entry.name] = entry.totalSize;
counts[entry.name] = entry.totalCount;
}
appInsights.setup(productJson.aiConfig.asimovKey)
.setAutoCollectConsole(false)
.setAutoCollectExceptions(false)
.setAutoCollectPerformance(false)
.setAutoCollectRequests(false)
.setAutoCollectDependencies(false)
.setAutoDependencyCorrelation(false)
.start();
appInsights.defaultClient.config.endpointUrl = 'https://vortex.data.microsoft.com/collect/v1';
/* __GDPR__
"monacoworkbench/packagemetrics" : {
"commit" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"size" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"count" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
}
*/
appInsights.defaultClient.trackEvent({
name: 'monacoworkbench/packagemetrics',
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
});
appInsights.defaultClient.flush({
callback: () => {
appInsights.dispose();
resolve(true);
}
});
}
catch (err) {
console.error('ERROR sending build stats as telemetry event!');
console.error(err);
resolve(false);
}
});
}
exports.submitAllStats = submitAllStats;
@@ -1,147 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as es from 'event-stream';
import * as util from 'gulp-util';
import * as File from 'vinyl';
import * as appInsights from 'applicationinsights';

class Entry {
constructor(readonly name: string, public totalCount: number, public totalSize: number) { }

toString(pretty?: boolean): string {
if (!pretty) {
if (this.totalCount === 1) {
return `${this.name}: ${this.totalSize} bytes`;
} else {
return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`;
}
} else {
if (this.totalCount === 1) {
return `Stats for '${util.colors.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`;

} else {
const count = this.totalCount < 100
? util.colors.green(this.totalCount.toString())
: util.colors.red(this.totalCount.toString());

return `Stats for '${util.colors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`;
}
}
}
}

const _entries = new Map<string, Entry>();

export function createStatsStream(group: string, log?: boolean): es.ThroughStream {

const entry = new Entry(group, 0, 0);
_entries.set(entry.name, entry);

return es.through(function (data) {
const file = data as File;
if (typeof file.path === 'string') {
entry.totalCount += 1;
if (Buffer.isBuffer(file.contents)) {
entry.totalSize += file.contents.length;
} else if (file.stat && typeof file.stat.size === 'number') {
entry.totalSize += file.stat.size;
} else {
// funky file...
}
}
this.emit('data', data);
}, function () {
if (log) {
if (entry.totalCount === 1) {
util.log(`Stats for '${util.colors.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`);

} else {
const count = entry.totalCount < 100
? util.colors.green(entry.totalCount.toString())
: util.colors.red(entry.totalCount.toString());

util.log(`Stats for '${util.colors.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`);
}
}

this.emit('end');
});
}

export function submitAllStats(productJson: any, commit: string): Promise<boolean> {

const sorted: Entry[] = [];
// move entries for single files to the front
_entries.forEach(value => {
if (value.totalCount === 1) {
sorted.unshift(value);
} else {
sorted.push(value);
}
});

// print to console
for (const entry of sorted) {
console.log(entry.toString(true));
}

// send data as telemetry event when the
// product is configured to send telemetry
if (!productJson || !productJson.aiConfig || typeof productJson.aiConfig.asimovKey !== 'string') {
return Promise.resolve(false);
}

return new Promise(resolve => {
try {

const sizes: any = {};
const counts: any = {};
for (const entry of sorted) {
sizes[entry.name] = entry.totalSize;
counts[entry.name] = entry.totalCount;
}

appInsights.setup(productJson.aiConfig.asimovKey)
.setAutoCollectConsole(false)
.setAutoCollectExceptions(false)
.setAutoCollectPerformance(false)
.setAutoCollectRequests(false)
.setAutoCollectDependencies(false)
.setAutoDependencyCorrelation(false)
.start();

appInsights.defaultClient.config.endpointUrl = 'https://vortex.data.microsoft.com/collect/v1';

/* __GDPR__
"monacoworkbench/packagemetrics" : {
"commit" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"size" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"count" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
}
*/
appInsights.defaultClient.trackEvent({
name: 'monacoworkbench/packagemetrics',
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
});


appInsights.defaultClient.flush({
callback: () => {
appInsights.dispose();
resolve(true);
}
});

} catch (err) {
console.error('ERROR sending build stats as telemetry event!');
console.error(err);
resolve(false);
}
});

}
@@ -4,30 +4,30 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const assert = require("assert");
const i18n = require("../i18n");
suite('XLF Parser Tests', () => {
const sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &</source></trans-unit></body></file></xliff>';
const sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &</source><target>Кнопка #2 &</target></trans-unit></body></file></xliff>';
const originalFilePath = 'vs/base/common/keybinding';
const keys = ['key1', 'key2'];
const messages = ['Key #1', 'Key #2 &'];
const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', () => {
const xlf = new i18n.XLF('vscode-workbench');
var assert = require("assert");
var i18n = require("../i18n");
suite('XLF Parser Tests', function () {
var sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &</source></trans-unit></body></file></xliff>';
var sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &</source><target>Кнопка #2 &</target></trans-unit></body></file></xliff>';
var originalFilePath = 'vs/base/common/keybinding';
var keys = ['key1', 'key2'];
var messages = ['Key #1', 'Key #2 &'];
var translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
test('Keys & messages to XLF conversion', function () {
var xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages);
const xlfString = xlf.toString();
var xlfString = xlf.toString();
assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
});
test('XLF to keys & messages conversion', () => {
test('XLF to keys & messages conversion', function () {
i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) {
assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
});
});
test('JSON file source path to Transifex resource match', () => {
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
test('JSON file source path to Transifex resource match', function () {
var editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
var platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);

@@ -15,7 +15,7 @@ suite('XLF Parser Tests', () => {
const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };

test('Keys & messages to XLF conversion', () => {
const xlf = new i18n.XLF('vscode-workbench');
let xlf = new i18n.XLF('vscode-workbench');
xlf.addFile(originalFilePath, keys, messages);
const xlfString = xlf.toString();


@@ -4,49 +4,18 @@
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const ts = require("typescript");
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
var fs = require("fs");
var path = require("path");
var ts = require("typescript");
var TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
var ShakeLevel;
(function (ShakeLevel) {
ShakeLevel[ShakeLevel["Files"] = 0] = "Files";
ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile";
ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers";
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
function printDiagnostics(diagnostics) {
for (let i = 0; i < diagnostics.length; i++) {
const diag = diagnostics[i];
let result = '';
if (diag.file) {
result += `${diag.file.fileName}: `;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `- ${location.line + 1},${location.character} - `;
}
result += JSON.stringify(diag.messageText);
console.log(result);
}
}
function shake(options) {
const languageService = createTypeScriptLanguageService(options);
const program = languageService.getProgram();
const globalDiagnostics = program.getGlobalDiagnostics();
if (globalDiagnostics.length > 0) {
printDiagnostics(globalDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const syntacticDiagnostics = program.getSyntacticDiagnostics();
if (syntacticDiagnostics.length > 0) {
printDiagnostics(syntacticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const semanticDiagnostics = program.getSemanticDiagnostics();
if (semanticDiagnostics.length > 0) {
printDiagnostics(semanticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
var languageService = createTypeScriptLanguageService(options);
markNodes(languageService, options);
return generateResult(languageService, options.shakeLevel);
}
@@ -54,99 +23,93 @@ exports.shake = shake;
//#region Discovery, LanguageService & Setup
function createTypeScriptLanguageService(options) {
// Discover referenced files
const FILES = discoverAndReadFiles(options);
var FILES = discoverAndReadFiles(options);
// Add fake usage files
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
FILES[`inlineEntryPoint.${index}.ts`] = inlineEntryPoint;
});
// Add additional typings
options.typings.forEach((typing) => {
const filePath = path.join(options.sourcesRoot, typing);
FILES[typing] = fs.readFileSync(filePath).toString();
options.inlineEntryPoints.forEach(function (inlineEntryPoint, index) {
FILES["inlineEntryPoint:" + index + ".ts"] = inlineEntryPoint;
});
// Resolve libs
const RESOLVED_LIBS = {};
options.libs.forEach((filename) => {
const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString();
var RESOLVED_LIBS = {};
options.libs.forEach(function (filename) {
var filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
RESOLVED_LIBS["defaultLib:" + filename] = fs.readFileSync(filepath).toString();
});
const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options;
const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, compilerOptions);
var host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
return ts.createLanguageService(host);
}
/**
 * Read imports and follow them until all files have been handled
 */
function discoverAndReadFiles(options) {
const FILES = {};
const in_queue = Object.create(null);
const queue = [];
const enqueue = (moduleId) => {
var FILES = {};
var in_queue = Object.create(null);
var queue = [];
var enqueue = function (moduleId) {
if (in_queue[moduleId]) {
return;
}
in_queue[moduleId] = true;
queue.push(moduleId);
};
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
while (queue.length > 0) {
const moduleId = queue.shift();
const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
var moduleId = queue.shift();
var dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
if (fs.existsSync(dts_filename)) {
const dts_filecontents = fs.readFileSync(dts_filename).toString();
FILES[`${moduleId}.d.ts`] = dts_filecontents;
var dts_filecontents = fs.readFileSync(dts_filename).toString();
FILES[moduleId + '.d.ts'] = dts_filecontents;
continue;
}
let ts_filename;
var ts_filename = void 0;
if (options.redirects[moduleId]) {
ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
}
else {
ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
}
const ts_filecontents = fs.readFileSync(ts_filename).toString();
const info = ts.preProcessFile(ts_filecontents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFileName = info.importedFiles[i].fileName;
var ts_filecontents = fs.readFileSync(ts_filename).toString();
var info = ts.preProcessFile(ts_filecontents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFileName = info.importedFiles[i].fileName;
if (options.importIgnorePattern.test(importedFileName)) {
// Ignore vs/css! imports
continue;
}
let importedModuleId = importedFileName;
var importedModuleId = importedFileName;
if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
}
enqueue(importedModuleId);
}
FILES[`${moduleId}.ts`] = ts_filecontents;
FILES[moduleId + '.ts'] = ts_filecontents;
}
return FILES;
}
/**
 * A TypeScript language service host
 */
class TypeScriptLanguageServiceHost {
constructor(libs, files, compilerOptions) {
var TypeScriptLanguageServiceHost = /** @class */ (function () {
function TypeScriptLanguageServiceHost(libs, files, compilerOptions) {
this._libs = libs;
this._files = files;
this._compilerOptions = compilerOptions;
}
// --- language service host ---------------
getCompilationSettings() {
TypeScriptLanguageServiceHost.prototype.getCompilationSettings = function () {
return this._compilerOptions;
}
getScriptFileNames() {
};
TypeScriptLanguageServiceHost.prototype.getScriptFileNames = function () {
return ([]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files)));
}
getScriptVersion(_fileName) {
};
TypeScriptLanguageServiceHost.prototype.getScriptVersion = function (fileName) {
return '1';
}
getProjectVersion() {
};
TypeScriptLanguageServiceHost.prototype.getProjectVersion = function () {
return '1';
}
getScriptSnapshot(fileName) {
};
TypeScriptLanguageServiceHost.prototype.getScriptSnapshot = function (fileName) {
if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]);
}
@@ -156,20 +119,21 @@ class TypeScriptLanguageServiceHost {
else {
return ts.ScriptSnapshot.fromString('');
}
}
getScriptKind(_fileName) {
};
TypeScriptLanguageServiceHost.prototype.getScriptKind = function (fileName) {
return ts.ScriptKind.TS;
}
getCurrentDirectory() {
};
TypeScriptLanguageServiceHost.prototype.getCurrentDirectory = function () {
return '';
}
getDefaultLibFileName(_options) {
};
TypeScriptLanguageServiceHost.prototype.getDefaultLibFileName = function (options) {
return 'defaultLib:lib.d.ts';
}
isDefaultLibFileName(fileName) {
};
TypeScriptLanguageServiceHost.prototype.isDefaultLibFileName = function (fileName) {
return fileName === this.getDefaultLibFileName(this._compilerOptions);
}
}
};
return TypeScriptLanguageServiceHost;
}());
//#endregion
//#region Tree Shaking
var NodeColor;
@@ -186,7 +150,7 @@ function setColor(node, color) {
}
function nodeOrParentIsBlack(node) {
while (node) {
const color = getColor(node);
var color = getColor(node);
if (color === 2 /* Black */) {
return true;
}
@@ -198,7 +162,8 @@ function nodeOrChildIsBlack(node) {
if (getColor(node) === 2 /* Black */) {
return true;
}
for (const child of node.getChildren()) {
for (var _i = 0, _a = node.getChildren(); _i < _a.length; _i++) {
var child = _a[_i];
if (nodeOrChildIsBlack(child)) {
return true;
}
@@ -206,22 +171,19 @@ function nodeOrChildIsBlack(node) {
return false;
}
function markNodes(languageService, options) {
const program = languageService.getProgram();
if (!program) {
throw new Error('Could not get program from language service');
}
var program = languageService.getProgram();
if (options.shakeLevel === 0 /* Files */) {
// Mark all source files Black
program.getSourceFiles().forEach((sourceFile) => {
program.getSourceFiles().forEach(function (sourceFile) {
setColor(sourceFile, 2 /* Black */);
});
return;
}
const black_queue = [];
const gray_queue = [];
const sourceFilesLoaded = {};
var black_queue = [];
var gray_queue = [];
var sourceFilesLoaded = {};
function enqueueTopLevelModuleStatements(sourceFile) {
sourceFile.forEachChild((node) => {
sourceFile.forEachChild(function (node) {
if (ts.isImportDeclaration(node)) {
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* Black */);
@@ -230,7 +192,7 @@ function markNodes(languageService, options) {
return;
}
if (ts.isExportDeclaration(node)) {
if (node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
if (ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* Black */);
enqueueImport(node, node.moduleSpecifier.text);
}
@@ -258,7 +220,7 @@ function markNodes(languageService, options) {
gray_queue.push(node);
}
function enqueue_black(node) {
const previousColor = getColor(node);
var previousColor = getColor(node);
if (previousColor === 2 /* Black */) {
return;
}
@@ -276,12 +238,12 @@ function markNodes(languageService, options) {
if (nodeOrParentIsBlack(node)) {
return;
}
const fileName = node.getSourceFile().fileName;
var fileName = node.getSourceFile().fileName;
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
setColor(node, 2 /* Black */);
return;
}
const sourceFile = node.getSourceFile();
var sourceFile = node.getSourceFile();
if (!sourceFilesLoaded[sourceFile.fileName]) {
sourceFilesLoaded[sourceFile.fileName] = true;
enqueueTopLevelModuleStatements(sourceFile);
@@ -292,15 +254,12 @@ function markNodes(languageService, options) {
setColor(node, 2 /* Black */);
black_queue.push(node);
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
var references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
if (references) {
for (let i = 0, len = references.length; i < len; i++) {
const reference = references[i];
const referenceSourceFile = program.getSourceFile(reference.fileName);
if (!referenceSourceFile) {
continue;
}
const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
for (var i = 0, len = references.length; i < len; i++) {
var reference = references[i];
var referenceSourceFile = program.getSourceFile(reference.fileName);
var referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
if (ts.isMethodDeclaration(referenceNode.parent)
|| ts.isPropertyDeclaration(referenceNode.parent)
|| ts.isGetAccessor(referenceNode.parent)
@@ -312,9 +271,9 @@ function markNodes(languageService, options) {
}
}
function enqueueFile(filename) {
const sourceFile = program.getSourceFile(filename);
var sourceFile = program.getSourceFile(filename);
if (!sourceFile) {
console.warn(`Cannot find source file ${filename}`);
console.warn("Cannot find source file " + filename);
return;
}
enqueue_black(sourceFile);
@@ -324,8 +283,8 @@ function markNodes(languageService, options) {
// this import should be ignored
return;
}
const nodeSourceFile = node.getSourceFile();
let fullPath;
var nodeSourceFile = node.getSourceFile();
var fullPath;
if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
}
@@ -334,25 +293,25 @@ function markNodes(languageService, options) {
}
enqueueFile(fullPath);
}
options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
options.entryPoints.forEach(function (moduleId) { return enqueueFile(moduleId + '.ts'); });
// Add fake usage files
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint.${index}.ts`));
let step = 0;
const checker = program.getTypeChecker();
while (black_queue.length > 0 || gray_queue.length > 0) {
options.inlineEntryPoints.forEach(function (_, index) { return enqueueFile("inlineEntryPoint:" + index + ".ts"); });
var step = 0;
var checker = program.getTypeChecker();
var _loop_1 = function () {
++step;
let node;
var node = void 0;
if (step % 100 === 0) {
console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
console.log(step + "/" + (step + black_queue.length + gray_queue.length) + " (" + black_queue.length + ", " + gray_queue.length + ")");
}
if (black_queue.length === 0) {
for (let i = 0; i < gray_queue.length; i++) {
const node = gray_queue[i];
const nodeParent = node.parent;
for (var i = 0; i < gray_queue.length; i++) {
var node_1 = gray_queue[i];
var nodeParent = node_1.parent;
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
gray_queue.splice(i, 1);
black_queue.push(node);
setColor(node, 2 /* Black */);
black_queue.push(node_1);
setColor(node_1, 2 /* Black */);
i--;
}
}
@@ -361,18 +320,17 @@ function markNodes(languageService, options) {
node = black_queue.shift();
}
else {
// only gray nodes remaining...
break;
return "break";
}
const nodeSourceFile = node.getSourceFile();
const loop = (node) => {
const [symbol, symbolImportNode] = getRealNodeSymbol(checker, node);
var nodeSourceFile = node.getSourceFile();
var loop = function (node) {
var _a = getRealNodeSymbol(checker, node), symbol = _a[0], symbolImportNode = _a[1];
if (symbolImportNode) {
setColor(symbolImportNode, 2 /* Black */);
}
if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
const declaration = symbol.declarations[i];
for (var i = 0, len = symbol.declarations.length; i < len; i++) {
var declaration = symbol.declarations[i];
if (ts.isSourceFile(declaration)) {
// Do not enqueue full source files
// (they can be the declaration of a module import)
@@ -380,9 +338,9 @@ function markNodes(languageService, options) {
}
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
enqueue_black(declaration.name);
for (let j = 0; j < declaration.members.length; j++) {
const member = declaration.members[j];
const memberName = member.name ? member.name.getText() : null;
for (var j = 0; j < declaration.members.length; j++) {
var member = declaration.members[j];
var memberName = member.name ? member.name.getText() : null;
if (ts.isConstructorDeclaration(member)
|| ts.isConstructSignatureDeclaration(member)
|| ts.isIndexSignatureDeclaration(member)
@@ -396,7 +354,8 @@ function markNodes(languageService, options) {
}
// queue the heritage clauses
if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) {
for (var _i = 0, _b = declaration.heritageClauses; _i < _b.length; _i++) {
var heritageClause = _b[_i];
enqueue_black(heritageClause);
}
}
@@ -409,12 +368,17 @@ function markNodes(languageService, options) {
node.forEachChild(loop);
};
node.forEachChild(loop);
};
while (black_queue.length > 0 || gray_queue.length > 0) {
var state_1 = _loop_1();
if (state_1 === "break")
break;
}
}
function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
for (let i = 0, len = symbol.declarations.length; i < len; i++) {
const declaration = symbol.declarations[i];
const declarationSourceFile = declaration.getSourceFile();
for (var i = 0, len = symbol.declarations.length; i < len; i++) {
var declaration = symbol.declarations[i];
var declarationSourceFile = declaration.getSourceFile();
if (nodeSourceFile === declarationSourceFile) {
if (declaration.pos <= node.pos && node.end <= declaration.end) {
return true;
@@ -424,28 +388,25 @@ function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
return false;
}
function generateResult(languageService, shakeLevel) {
const program = languageService.getProgram();
if (!program) {
throw new Error('Could not get program from language service');
}
let result = {};
const writeFile = (filePath, contents) => {
var program = languageService.getProgram();
var result = {};
var writeFile = function (filePath, contents) {
result[filePath] = contents;
};
program.getSourceFiles().forEach((sourceFile) => {
const fileName = sourceFile.fileName;
program.getSourceFiles().forEach(function (sourceFile) {
var fileName = sourceFile.fileName;
if (/^defaultLib:/.test(fileName)) {
return;
}
const destination = fileName;
var destination = fileName;
if (/\.d\.ts$/.test(fileName)) {
if (nodeOrChildIsBlack(sourceFile)) {
writeFile(destination, sourceFile.text);
}
return;
}
let text = sourceFile.text;
let result = '';
var text = sourceFile.text;
var result = '';
function keep(node) {
result += text.substring(node.pos, node.end);
}
@@ -474,24 +435,24 @@ function generateResult(languageService, shakeLevel) {
}
}
else {
let survivingImports = [];
for (let i = 0; i < node.importClause.namedBindings.elements.length; i++) {
const importNode = node.importClause.namedBindings.elements[i];
var survivingImports = [];
for (var i = 0; i < node.importClause.namedBindings.elements.length; i++) {
var importNode = node.importClause.namedBindings.elements[i];
if (getColor(importNode) === 2 /* Black */) {
survivingImports.push(importNode.getFullText(sourceFile));
}
}
const leadingTriviaWidth = node.getLeadingTriviaWidth();
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
var leadingTriviaWidth = node.getLeadingTriviaWidth();
var leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return write(leadingTrivia + "import " + node.importClause.name.text + ", {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
}
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
return write(leadingTrivia + "import {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
}
else {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return write(leadingTrivia + "import " + node.importClause.name.text + " from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
}
}
}
@@ -503,10 +464,10 @@ function generateResult(languageService, shakeLevel) {
}
}
if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
let toWrite = node.getFullText();
for (let i = node.members.length - 1; i >= 0; i--) {
const member = node.members[i];
if (getColor(member) === 2 /* Black */ || !member.name) {
var toWrite = node.getFullText();
for (var i = node.members.length - 1; i >= 0; i--) {
var member = node.members[i];
if (getColor(member) === 2 /* Black */) {
// keep method
continue;
}
@@ -514,8 +475,8 @@ function generateResult(languageService, shakeLevel) {
// TODO: keep all members ending with `Brand`...
continue;
}
let pos = member.pos - node.pos;
let end = member.end - node.pos;
var pos = member.pos - node.pos;
var end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
}
return write(toWrite);
@@ -547,9 +508,68 @@ function generateResult(languageService, shakeLevel) {
 * Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
 */
function getRealNodeSymbol(checker, node) {
const getPropertySymbolsFromContextualType = ts.getPropertySymbolsFromContextualType;
const getContainingObjectLiteralElement = ts.getContainingObjectLiteralElement;
const getNameFromPropertyName = ts.getNameFromPropertyName;
/**
 * Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
 */
/* @internal */
function getContainingObjectLiteralElement(node) {
switch (node.kind) {
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
}
// falls through
case ts.SyntaxKind.Identifier:
|
||||
return ts.isObjectLiteralElement(node.parent) &&
|
||||
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
|
||||
node.parent.name === node ? node.parent : undefined;
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
function getPropertySymbolsFromType(type, propName) {
|
||||
function getTextOfPropertyName(name) {
|
||||
function isStringOrNumericLiteral(node) {
|
||||
var kind = node.kind;
|
||||
return kind === ts.SyntaxKind.StringLiteral
|
||||
|| kind === ts.SyntaxKind.NumericLiteral;
|
||||
}
|
||||
switch (name.kind) {
|
||||
case ts.SyntaxKind.Identifier:
|
||||
return name.text;
|
||||
case ts.SyntaxKind.StringLiteral:
|
||||
case ts.SyntaxKind.NumericLiteral:
|
||||
return name.text;
|
||||
case ts.SyntaxKind.ComputedPropertyName:
|
||||
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined;
|
||||
}
|
||||
}
|
||||
var name = getTextOfPropertyName(propName);
|
||||
if (name && type) {
|
||||
var result = [];
|
||||
var symbol_1 = type.getProperty(name);
|
||||
if (type.flags & ts.TypeFlags.Union) {
|
||||
for (var _i = 0, _a = type.types; _i < _a.length; _i++) {
|
||||
var t = _a[_i];
|
||||
var symbol_2 = t.getProperty(name);
|
||||
if (symbol_2) {
|
||||
result.push(symbol_2);
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
if (symbol_1) {
|
||||
result.push(symbol_1);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
return undefined;
|
||||
}
|
||||
function getPropertySymbolsFromContextualType(typeChecker, node) {
|
||||
var objectLiteral = node.parent;
|
||||
var contextualType = typeChecker.getContextualType(objectLiteral);
|
||||
return getPropertySymbolsFromType(contextualType, node.name);
|
||||
}
|
||||
// Go to the original declaration for cases:
|
||||
//
|
||||
// (1) when the aliased symbol was declared in the location(parent).
|
||||
@@ -577,15 +597,10 @@ function getRealNodeSymbol(checker, node) {
|
||||
return [null, null];
|
||||
}
|
||||
}
|
||||
const { parent } = node;
|
||||
let symbol = checker.getSymbolAtLocation(node);
|
||||
let importNode = null;
|
||||
// If this is an alias, and the request came at the declaration location
|
||||
// get the aliased symbol instead. This allows for goto def on an import e.g.
|
||||
// import {A, B} from "mod";
|
||||
// to jump to the implementation directly.
|
||||
var symbol = checker.getSymbolAtLocation(node);
|
||||
var importNode = null;
|
||||
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
|
||||
const aliased = checker.getAliasedSymbol(symbol);
|
||||
var aliased = checker.getAliasedSymbol(symbol);
|
||||
if (aliased.declarations) {
|
||||
// We should mark the import as visited
|
||||
importNode = symbol.declarations[0];
|
||||
@@ -612,22 +627,13 @@ function getRealNodeSymbol(checker, node) {
|
||||
// pr/*destination*/op1: number
|
||||
// }
|
||||
// bar<Test>(({pr/*goto*/op1})=>{});
|
||||
if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) &&
|
||||
(node === (parent.propertyName || parent.name))) {
|
||||
const name = getNameFromPropertyName(node);
|
||||
const type = checker.getTypeAtLocation(parent.parent);
|
||||
if (name && type) {
|
||||
if (type.isUnion()) {
|
||||
const prop = type.types[0].getProperty(name);
|
||||
if (prop) {
|
||||
symbol = prop;
|
||||
}
|
||||
}
|
||||
else {
|
||||
const prop = type.getProperty(name);
|
||||
if (prop) {
|
||||
symbol = prop;
|
||||
}
|
||||
if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
|
||||
(node === (node.parent.propertyName || node.parent.name))) {
|
||||
var type = checker.getTypeAtLocation(node.parent.parent);
|
||||
if (type) {
|
||||
var propSymbols = getPropertySymbolsFromType(type, node);
|
||||
if (propSymbols) {
|
||||
symbol = propSymbols[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -640,14 +646,11 @@ function getRealNodeSymbol(checker, node) {
|
||||
// }
|
||||
// function Foo(arg: Props) {}
|
||||
// Foo( { pr/*1*/op1: 10, prop2: false })
|
||||
const element = getContainingObjectLiteralElement(node);
|
||||
if (element) {
|
||||
const contextualType = element && checker.getContextualType(element.parent);
|
||||
if (contextualType) {
|
||||
const propertySymbols = getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false);
|
||||
if (propertySymbols) {
|
||||
symbol = propertySymbols[0];
|
||||
}
|
||||
var element = getContainingObjectLiteralElement(node);
|
||||
if (element && checker.getContextualType(element.parent)) {
|
||||
var propertySymbols = getPropertySymbolsFromContextualType(checker, element);
|
||||
if (propertySymbols) {
|
||||
symbol = propertySymbols[0];
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -658,16 +661,17 @@ function getRealNodeSymbol(checker, node) {
|
||||
}
|
||||
/** Get the token whose text contains the position */
|
||||
function getTokenAtPosition(sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) {
|
||||
let current = sourceFile;
|
||||
var current = sourceFile;
|
||||
outer: while (true) {
|
||||
// find the child that contains 'position'
|
||||
for (const child of current.getChildren()) {
|
||||
const start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
|
||||
for (var _i = 0, _a = current.getChildren(); _i < _a.length; _i++) {
|
||||
var child = _a[_i];
|
||||
var start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
|
||||
if (start > position) {
|
||||
// If this child begins after position, then all subsequent children will as well.
|
||||
break;
|
||||
}
|
||||
const end = child.getEnd();
|
||||
var end = child.getEnd();
|
||||
if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
|
||||
current = child;
|
||||
continue outer;
|
||||
|
||||
@@ -36,14 +36,10 @@ export interface ITreeShakingOptions {
* e.g. `lib.d.ts`, `lib.es2015.collection.d.ts`
*/
libs: string[];
/**
* Other .d.ts files
*/
typings: string[];
/**
* TypeScript compiler options.
*/
compilerOptions?: any;
compilerOptions: ts.CompilerOptions;
/**
* The shake level to perform.
*/
@@ -60,43 +56,8 @@ export interface ITreeShakingResult {
[file: string]: string;
}

function printDiagnostics(diagnostics: ReadonlyArray<ts.Diagnostic>): void {
for (let i = 0; i < diagnostics.length; i++) {
const diag = diagnostics[i];
let result = '';
if (diag.file) {
result += `${diag.file.fileName}: `;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `- ${location.line + 1},${location.character} - `
}
result += JSON.stringify(diag.messageText);
console.log(result);
}
}

export function shake(options: ITreeShakingOptions): ITreeShakingResult {
const languageService = createTypeScriptLanguageService(options);
const program = languageService.getProgram()!;

const globalDiagnostics = program.getGlobalDiagnostics();
if (globalDiagnostics.length > 0) {
printDiagnostics(globalDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}

const syntacticDiagnostics = program.getSyntacticDiagnostics();
if (syntacticDiagnostics.length > 0) {
printDiagnostics(syntacticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}

const semanticDiagnostics = program.getSemanticDiagnostics();
if (semanticDiagnostics.length > 0) {
printDiagnostics(semanticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}

markNodes(languageService, options);

@@ -110,13 +71,7 @@ function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.Langu

// Add fake usage files
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
FILES[`inlineEntryPoint.${index}.ts`] = inlineEntryPoint;
});

// Add additional typings
options.typings.forEach((typing) => {
const filePath = path.join(options.sourcesRoot, typing);
FILES[typing] = fs.readFileSync(filePath).toString();
FILES[`inlineEntryPoint:${index}.ts`] = inlineEntryPoint;
});

// Resolve libs
@@ -126,9 +81,7 @@ function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.Langu
RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString();
});

const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options;

const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, compilerOptions);
const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
return ts.createLanguageService(host);
}

@@ -152,11 +105,11 @@ function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));

while (queue.length > 0) {
const moduleId = queue.shift()!;
const moduleId = queue.shift();
const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
if (fs.existsSync(dts_filename)) {
const dts_filecontents = fs.readFileSync(dts_filename).toString();
FILES[`${moduleId}.d.ts`] = dts_filecontents;
FILES[moduleId + '.d.ts'] = dts_filecontents;
continue;
}

@@ -183,7 +136,7 @@ function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
enqueue(importedModuleId);
}

FILES[`${moduleId}.ts`] = ts_filecontents;
FILES[moduleId + '.ts'] = ts_filecontents;
}

return FILES;
@@ -214,12 +167,12 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
}
getScriptFileNames(): string[] {
return (
([] as string[])
[]
.concat(Object.keys(this._libs))
.concat(Object.keys(this._files))
);
}
getScriptVersion(_fileName: string): string {
getScriptVersion(fileName: string): string {
return '1';
}
getProjectVersion(): string {
@@ -234,13 +187,13 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
return ts.ScriptSnapshot.fromString('');
}
}
getScriptKind(_fileName: string): ts.ScriptKind {
getScriptKind(fileName: string): ts.ScriptKind {
return ts.ScriptKind.TS;
}
getCurrentDirectory(): string {
return '';
}
getDefaultLibFileName(_options: ts.CompilerOptions): string {
getDefaultLibFileName(options: ts.CompilerOptions): string {
return 'defaultLib:lib.d.ts';
}
isDefaultLibFileName(fileName: string): boolean {
@@ -287,9 +240,6 @@ function nodeOrChildIsBlack(node: ts.Node): boolean {

function markNodes(languageService: ts.LanguageService, options: ITreeShakingOptions) {
const program = languageService.getProgram();
if (!program) {
throw new Error('Could not get program from language service');
}

if (options.shakeLevel === ShakeLevel.Files) {
// Mark all source files Black
@@ -316,7 +266,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
}

if (ts.isExportDeclaration(node)) {
if (node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) {
if (ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, NodeColor.Black);
enqueueImport(node, node.moduleSpecifier.text);
}
@@ -399,11 +349,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
if (references) {
for (let i = 0, len = references.length; i < len; i++) {
const reference = references[i];
const referenceSourceFile = program!.getSourceFile(reference.fileName);
if (!referenceSourceFile) {
continue;
}

const referenceSourceFile = program.getSourceFile(reference.fileName);
const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
if (
ts.isMethodDeclaration(referenceNode.parent)
@@ -419,7 +365,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
}

function enqueueFile(filename: string): void {
const sourceFile = program!.getSourceFile(filename);
const sourceFile = program.getSourceFile(filename);
if (!sourceFile) {
console.warn(`Cannot find source file ${filename}`);
return;
@@ -445,7 +391,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt

options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
// Add fake usage files
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint.${index}.ts`));
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint:${index}.ts`));

let step = 0;

@@ -455,7 +401,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
let node: ts.Node;

if (step % 100 === 0) {
console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
console.log(`${step}/${step+black_queue.length+gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
}

if (black_queue.length === 0) {
@@ -472,7 +418,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
}

if (black_queue.length > 0) {
node = black_queue.shift()!;
node = black_queue.shift();
} else {
// only gray nodes remaining...
break;
@@ -495,7 +441,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
}

if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
enqueue_black(declaration.name!);
enqueue_black(declaration.name);

for (let j = 0; j < declaration.members.length; j++) {
const member = declaration.members[j];
@@ -547,9 +493,6 @@ function nodeIsInItsOwnDeclaration(nodeSourceFile: ts.SourceFile, node: ts.Node,

function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult {
const program = languageService.getProgram();
if (!program) {
throw new Error('Could not get program from language service');
}

let result: ITreeShakingResult = {};
const writeFile = (filePath: string, contents: string): void => {
@@ -613,12 +556,12 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
const leadingTriviaWidth = node.getLeadingTriviaWidth();
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === NodeColor.Black) {
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
} else {
if (node.importClause && node.importClause.name && getColor(node.importClause) === NodeColor.Black) {
if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
}
}
@@ -634,7 +577,7 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
let toWrite = node.getFullText();
for (let i = node.members.length - 1; i >= 0; i--) {
const member = node.members[i];
if (getColor(member) === NodeColor.Black || !member.name) {
if (getColor(member) === NodeColor.Black) {
// keep method
continue;
}
@@ -682,13 +625,74 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
/**
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
*/
function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol | null, ts.Declaration | null] {
function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol, ts.Declaration] {
/**
* Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
*/
/* @internal */
function getContainingObjectLiteralElement(node: ts.Node): ts.ObjectLiteralElement | undefined {
switch (node.kind) {
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
}
// falls through
case ts.SyntaxKind.Identifier:
return ts.isObjectLiteralElement(node.parent) &&
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
node.parent.name === node ? node.parent : undefined;
}
return undefined;
}

// Use some TypeScript internals to avoid code duplication
type ObjectLiteralElementWithName = ts.ObjectLiteralElement & { name: ts.PropertyName; parent: ts.ObjectLiteralExpression | ts.JsxAttributes };
const getPropertySymbolsFromContextualType: (node: ObjectLiteralElementWithName, checker: ts.TypeChecker, contextualType: ts.Type, unionSymbolOk: boolean) => ReadonlyArray<ts.Symbol> = (<any>ts).getPropertySymbolsFromContextualType;
const getContainingObjectLiteralElement: (node: ts.Node) => ObjectLiteralElementWithName | undefined = (<any>ts).getContainingObjectLiteralElement;
const getNameFromPropertyName: (name: ts.PropertyName) => string | undefined = (<any>ts).getNameFromPropertyName;
function getPropertySymbolsFromType(type: ts.Type, propName: ts.PropertyName) {
function getTextOfPropertyName(name: ts.PropertyName): string {

function isStringOrNumericLiteral(node: ts.Node): node is ts.StringLiteral | ts.NumericLiteral {
const kind = node.kind;
return kind === ts.SyntaxKind.StringLiteral
|| kind === ts.SyntaxKind.NumericLiteral;
}

switch (name.kind) {
case ts.SyntaxKind.Identifier:
return name.text;
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
return name.text;
case ts.SyntaxKind.ComputedPropertyName:
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined!;
}
}

const name = getTextOfPropertyName(propName);
if (name && type) {
const result: ts.Symbol[] = [];
const symbol = type.getProperty(name);
if (type.flags & ts.TypeFlags.Union) {
for (const t of (<ts.UnionType>type).types) {
const symbol = t.getProperty(name);
if (symbol) {
result.push(symbol);
}
}
return result;
}

if (symbol) {
result.push(symbol);
return result;
}
}
return undefined;
}

function getPropertySymbolsFromContextualType(typeChecker: ts.TypeChecker, node: ts.ObjectLiteralElement): ts.Symbol[] {
const objectLiteral = <ts.ObjectLiteralExpression | ts.JsxAttributes>node.parent;
const contextualType = typeChecker.getContextualType(objectLiteral)!;
return getPropertySymbolsFromType(contextualType, node.name!)!;
}

// Go to the original declaration for cases:
//
@@ -719,14 +723,8 @@ function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol |
}
}

const { parent } = node;

let symbol = checker.getSymbolAtLocation(node);
let importNode: ts.Declaration | null = null;
// If this is an alias, and the request came at the declaration location
// get the aliased symbol instead. This allows for goto def on an import e.g.
// import {A, B} from "mod";
// to jump to the implementation directly.
let importNode: ts.Declaration = null;
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
const aliased = checker.getAliasedSymbol(symbol);
if (aliased.declarations) {
@@ -757,21 +755,13 @@ function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol |
// pr/*destination*/op1: number
// }
// bar<Test>(({pr/*goto*/op1})=>{});
if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) &&
(node === (parent.propertyName || parent.name))) {
const name = getNameFromPropertyName(node);
const type = checker.getTypeAtLocation(parent.parent);
if (name && type) {
if (type.isUnion()) {
const prop = type.types[0].getProperty(name);
if (prop) {
symbol = prop;
}
} else {
const prop = type.getProperty(name);
if (prop) {
symbol = prop;
}
if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
(node === (node.parent.propertyName || node.parent.name))) {
const type = checker.getTypeAtLocation(node.parent.parent);
if (type) {
const propSymbols = getPropertySymbolsFromType(type, node);
if (propSymbols) {
symbol = propSymbols[0];
}
}
}
@@ -786,13 +776,10 @@ function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol |
// function Foo(arg: Props) {}
// Foo( { pr/*1*/op1: 10, prop2: false })
const element = getContainingObjectLiteralElement(node);
if (element) {
const contextualType = element && checker.getContextualType(element.parent);
if (contextualType) {
const propertySymbols = getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false);
if (propertySymbols) {
symbol = propertySymbols[0];
}
if (element && checker.getContextualType(element.parent as ts.Expression)) {
const propertySymbols = getPropertySymbolsFromContextualType(checker, element);
if (propertySymbols) {
symbol = propertySymbols[0];
}
}
}

@@ -3,30 +3,48 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = require("path");
const Lint = require("tslint");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
var path_1 = require("path");
var Lint = require("tslint");
var Rule = /** @class */ (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions()));
}
}
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
class ImportPatterns extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
this.imports = Object.create(null);
var ImportPatterns = /** @class */ (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts) {
var _this = _super.call(this, file, opts) || this;
_this.imports = Object.create(null);
return _this;
}
visitImportDeclaration(node) {
let path = node.moduleSpecifier.getText();
ImportPatterns.prototype.visitImportDeclaration = function (node) {
var path = node.moduleSpecifier.getText();
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
if (this.imports[path]) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Duplicate imports for '${path}'.`));
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Duplicate imports for '" + path + "'."));
}
this.imports[path] = true;
}
}
};
return ImportPatterns;
}(Lint.RuleWalker));

@@ -3,60 +3,79 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
const minimatch = require("minimatch");
const path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
const configs = this.getOptions().ruleArguments;
for (const config of configs) {
var ts = require("typescript");
var Lint = require("tslint");
var minimatch = require("minimatch");
var path_1 = require("path");
var Rule = /** @class */ (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var configs = this.getOptions().ruleArguments;
for (var _i = 0, configs_1 = configs; _i < configs_1.length; _i++) {
var config = configs_1[_i];
if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config));
}
}
return [];
}
}
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
class ImportPatterns extends Lint.RuleWalker {
constructor(file, opts, _config) {
super(file, opts);
this._config = _config;
var ImportPatterns = /** @class */ (function (_super) {
__extends(ImportPatterns, _super);
function ImportPatterns(file, opts, _config) {
var _this = _super.call(this, file, opts) || this;
_this._config = _config;
return _this;
}
visitImportEqualsDeclaration(node) {
ImportPatterns.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
visitImportDeclaration(node) {
};
ImportPatterns.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
}
visitCallExpression(node) {
super.visitCallExpression(node);
};
ImportPatterns.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
}
_validateImport(path, node) {
};
ImportPatterns.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(this.getSourceFile().fileName, path);
}
let restrictions;
var restrictions;
if (typeof this._config.restrictions === 'string') {
restrictions = [this._config.restrictions];
}
else {
restrictions = this._config.restrictions;
}
let matched = false;
for (const pattern of restrictions) {
var matched = false;
for (var _i = 0, restrictions_1 = restrictions; _i < restrictions_1.length; _i++) {
var pattern = restrictions_1[_i];
if (minimatch(path, pattern)) {
matched = true;
break;
@@ -64,7 +83,8 @@ class ImportPatterns extends Lint.RuleWalker {
}
if (!matched) {
// None of the restrictions matched
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Imports violates '${restrictions.join(' or ')}' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Imports violates '" + restrictions.join(' or ') + "' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization"));
}
}
}
};
return ImportPatterns;
}(Lint.RuleWalker));

@@ -3,22 +3,36 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
const path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
const parts = path_1.dirname(sourceFile.fileName).split(/\\|\//);
const ruleArgs = this.getOptions().ruleArguments[0];
let config;
for (let i = parts.length - 1; i >= 0; i--) {
var ts = require("typescript");
var Lint = require("tslint");
var path_1 = require("path");
var Rule = /** @class */ (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var parts = path_1.dirname(sourceFile.fileName).split(/\\|\//);
var ruleArgs = this.getOptions().ruleArguments[0];
var config;
for (var i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set()
};
Object.keys(ruleArgs).forEach(key => {
Object.keys(ruleArgs).forEach(function (key) {
if (!config.allowed.has(key)) {
config.disallowed.add(key);
}
@@ -30,54 +44,58 @@ class Rule extends Lint.Rules.AbstractRule {
return [];
}
return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions()));
}
}
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
class LayeringRule extends Lint.RuleWalker {
constructor(file, config, opts) {
super(file, opts);
this._config = config;
var LayeringRule = /** @class */ (function (_super) {
__extends(LayeringRule, _super);
function LayeringRule(file, config, opts) {
var _this = _super.call(this, file, opts) || this;
_this._config = config;
return _this;
}
visitImportEqualsDeclaration(node) {
LayeringRule.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
visitImportDeclaration(node) {
};
LayeringRule.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
}
visitCallExpression(node) {
super.visitCallExpression(node);
};
LayeringRule.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
}
_validateImport(path, node) {
};
LayeringRule.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
}
const parts = path_1.dirname(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i];
var parts = path_1.dirname(path).split(/\\|\//);
for (var i = parts.length - 1; i >= 0; i--) {
var part = parts[i];
if (this._config.allowed.has(part)) {
// GOOD - same layer
return;
}
if (this._config.disallowed.has(part)) {
// BAD - wrong layer
const message = `Bad layering. You are not allowed to access '${part}' from here, allowed layers are: [${LayeringRule._print(this._config.allowed)}]`;
var message = "Bad layering. You are not allowed to access '" + part + "' from here, allowed layers are: [" + LayeringRule._print(this._config.allowed) + "]";
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message));
return;
}
}
}
static _print(set) {
const r = [];
set.forEach(e => r.push(e));
};
LayeringRule._print = function (set) {
var r = [];
set.forEach(function (e) { return r.push(e); });
return r.join(', ');
}
}
};
return LayeringRule;
}(Lint.RuleWalker));

@@ -16,9 +16,9 @@ export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {

const parts = dirname(sourceFile.fileName).split(/\\|\//);
const ruleArgs = this.getOptions().ruleArguments[0];
let ruleArgs = this.getOptions().ruleArguments[0];

let config: Config | undefined;
let config: Config;
for (let i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
@@ -26,8 +26,8 @@ export class Rule extends Lint.Rules.AbstractRule {
disallowed: new Set<string>()
};
Object.keys(ruleArgs).forEach(key => {
if (!config!.allowed.has(key)) {
config!.disallowed.add(key);
if (!config.allowed.has(key)) {
config.disallowed.add(key);
}
});
break;
@@ -98,7 +98,7 @@ class LayeringRule extends Lint.RuleWalker {
}

static _print(set: Set<string>): string {
const r: string[] = [];
let r: string[] = [];
set.forEach(e => r.push(e));
return r.join(', ');
}

@@ -1,22 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
return this.applyWithWalker(new NewBufferRuleWalker(sourceFile, this.getOptions()));
}
}
exports.Rule = Rule;
class NewBufferRuleWalker extends Lint.RuleWalker {
visitNewExpression(node) {
if (node.expression.kind === ts.SyntaxKind.Identifier && node.expression && node.expression.text === 'Buffer') {
this.addFailureAtNode(node, '`new Buffer` is deprecated. Consider Buffer.From or Buffer.alloc instead.');
}
super.visitNewExpression(node);
}
}
@@ -1,23 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as ts from 'typescript';
import * as Lint from 'tslint';

export class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new NewBufferRuleWalker(sourceFile, this.getOptions()));
}
}

class NewBufferRuleWalker extends Lint.RuleWalker {
visitNewExpression(node: ts.NewExpression) {
if (node.expression.kind === ts.SyntaxKind.Identifier && node.expression && (node.expression as ts.Identifier).text === 'Buffer') {
this.addFailureAtNode(node, '`new Buffer` is deprecated. Consider Buffer.From or Buffer.alloc instead.');
}

super.visitNewExpression(node);
}
}
@@ -3,42 +3,57 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
const path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
var ts = require("typescript");
var Lint = require("tslint");
var path_1 = require("path");
var Rule = /** @class */ (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
// the vs/editor folder is allowed to use the standalone editor
return [];
}
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
}
}
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
var NoStandaloneEditorRuleWalker = /** @class */ (function (_super) {
__extends(NoStandaloneEditorRuleWalker, _super);
function NoStandaloneEditorRuleWalker(file, opts) {
return _super.call(this, file, opts) || this;
}
visitImportEqualsDeclaration(node) {
NoStandaloneEditorRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node);
}
}
visitImportDeclaration(node) {
};
NoStandaloneEditorRuleWalker.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node);
}
visitCallExpression(node) {
super.visitCallExpression(node);
};
NoStandaloneEditorRuleWalker.prototype.visitCallExpression = function (node) {
_super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments;
var path = node.arguments[0];
this._validateImport(path.getText(), node);
}
}
// {{SQL CARBON EDIT}} - Rename node argument to _node to prevent errors since it is not used
_validateImport(path, _node) {
};
NoStandaloneEditorRuleWalker.prototype._validateImport = function (path, node) {
// remove quotes
path = path.slice(1, -1);
// resolve relative paths
@@ -53,5 +68,6 @@ class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
// {{SQL CARBON EDIT}}
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
}
}
}
};
return NoStandaloneEditorRuleWalker;
}(Lint.RuleWalker));

@@ -43,8 +43,7 @@ class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
}
}

// {{SQL CARBON EDIT}} - Rename node argument to _node to prevent errors since it is not used
private _validateImport(path: string, _node: ts.Node): void {
private _validateImport(path: string, node: ts.Node): void {
// remove quotes
path = path.slice(1, -1);

@@ -3,17 +3,32 @@
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
var __extends = (this && this.__extends) || (function () {
|
||||
var extendStatics = Object.setPrototypeOf ||
|
||||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
|
||||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
|
||||
return function (d, b) {
|
||||
extendStatics(d, b);
|
||||
function __() { this.constructor = d; }
|
||||
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const ts = require("typescript");
|
||||
const Lint = require("tslint");
|
||||
var ts = require("typescript");
|
||||
var Lint = require("tslint");
|
||||
/**
|
||||
* Implementation of the no-unexternalized-strings rule.
|
||||
*/
|
||||
class Rule extends Lint.Rules.AbstractRule {
|
||||
apply(sourceFile) {
|
||||
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
|
||||
var Rule = /** @class */ (function (_super) {
|
||||
__extends(Rule, _super);
|
||||
function Rule() {
|
||||
return _super !== null && _super.apply(this, arguments) || this;
|
||||
}
|
||||
}
|
||||
Rule.prototype.apply = function (sourceFile) {
|
||||
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
|
||||
};
|
||||
return Rule;
|
||||
}(Lint.Rules.AbstractRule));
|
||||
exports.Rule = Rule;
|
||||
function isStringLiteral(node) {
|
||||
return node && node.kind === ts.SyntaxKind.StringLiteral;
|
||||
@@ -24,76 +39,73 @@ function isObjectLiteral(node) {
|
||||
function isPropertyAssignment(node) {
|
||||
return node && node.kind === ts.SyntaxKind.PropertyAssignment;
|
||||
}
|
||||
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
|
||||
constructor(file, opts) {
|
||||
super(file, opts);
|
||||
this.signatures = Object.create(null);
|
||||
this.ignores = Object.create(null);
|
||||
this.messageIndex = undefined;
|
||||
this.keyIndex = undefined;
|
||||
this.usedKeys = Object.create(null);
|
||||
const options = this.getOptions();
|
||||
const first = options && options.length > 0 ? options[0] : null;
|
||||
var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
|
||||
__extends(NoUnexternalizedStringsRuleWalker, _super);
|
||||
function NoUnexternalizedStringsRuleWalker(file, opts) {
|
||||
var _this = _super.call(this, file, opts) || this;
|
||||
_this.signatures = Object.create(null);
|
||||
_this.ignores = Object.create(null);
|
||||
_this.messageIndex = undefined;
|
||||
_this.keyIndex = undefined;
|
||||
_this.usedKeys = Object.create(null);
|
||||
var options = _this.getOptions();
|
||||
var first = options && options.length > 0 ? options[0] : null;
|
||||
if (first) {
|
||||
if (Array.isArray(first.signatures)) {
|
||||
first.signatures.forEach((signature) => this.signatures[signature] = true);
|
||||
first.signatures.forEach(function (signature) { return _this.signatures[signature] = true; });
|
||||
}
|
||||
if (Array.isArray(first.ignores)) {
|
||||
first.ignores.forEach((ignore) => this.ignores[ignore] = true);
|
||||
first.ignores.forEach(function (ignore) { return _this.ignores[ignore] = true; });
|
||||
}
|
||||
if (typeof first.messageIndex !== 'undefined') {
|
||||
this.messageIndex = first.messageIndex;
|
||||
_this.messageIndex = first.messageIndex;
|
||||
}
|
||||
if (typeof first.keyIndex !== 'undefined') {
|
||||
this.keyIndex = first.keyIndex;
|
||||
_this.keyIndex = first.keyIndex;
|
||||
}
|
||||
}
|
||||
return _this;
|
||||
}
|
||||
visitSourceFile(node) {
|
||||
super.visitSourceFile(node);
|
||||
Object.keys(this.usedKeys).forEach(key => {
|
||||
// Keys are quoted.
|
||||
let identifier = key.substr(1, key.length - 2);
|
||||
if (!NoUnexternalizedStringsRuleWalker.IDENTIFIER.test(identifier)) {
|
||||
let occurrence = this.usedKeys[key][0];
|
||||
this.addFailure(this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `The key ${occurrence.key.getText()} doesn't conform to a valid localize identifier`));
|
||||
}
|
||||
const occurrences = this.usedKeys[key];
|
||||
NoUnexternalizedStringsRuleWalker.prototype.visitSourceFile = function (node) {
|
||||
var _this = this;
|
||||
_super.prototype.visitSourceFile.call(this, node);
|
||||
Object.keys(this.usedKeys).forEach(function (key) {
|
||||
var occurrences = _this.usedKeys[key];
|
||||
if (occurrences.length > 1) {
|
||||
occurrences.forEach(occurrence => {
|
||||
this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`)));
|
||||
occurrences.forEach(function (occurrence) {
|
||||
_this.addFailure((_this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), "Duplicate key " + occurrence.key.getText() + " with different message value.")));
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
visitStringLiteral(node) {
|
||||
};
|
||||
NoUnexternalizedStringsRuleWalker.prototype.visitStringLiteral = function (node) {
|
||||
this.checkStringLiteral(node);
|
||||
super.visitStringLiteral(node);
|
||||
}
|
||||
checkStringLiteral(node) {
|
||||
const text = node.getText();
|
||||
const doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
|
||||
const info = this.findDescribingParent(node);
|
||||
_super.prototype.visitStringLiteral.call(this, node);
|
||||
};
|
||||
NoUnexternalizedStringsRuleWalker.prototype.checkStringLiteral = function (node) {
|
||||
var text = node.getText();
|
||||
var doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
|
||||
var info = this.findDescribingParent(node);
|
||||
// Ignore strings in import and export nodes.
|
||||
if (info && info.isImport && doubleQuoted) {
|
||||
const fix = [
|
||||
var fix = [
|
||||
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
|
||||
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
|
||||
];
|
||||
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix);
|
||||
return;
|
||||
}
|
||||
const callInfo = info ? info.callInfo : null;
|
||||
const functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
|
||||
var callInfo = info ? info.callInfo : null;
|
||||
var functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
|
||||
if (functionName && this.ignores[functionName]) {
|
||||
return;
|
||||
}
|
||||
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
|
||||
const s = node.getText();
|
||||
const fix = [
|
||||
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`),
|
||||
var s = node.getText();
|
||||
var fix = [
|
||||
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")"),
|
||||
];
|
||||
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix));
|
||||
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
|
||||
return;
|
||||
}
|
||||
// We have a single quoted string outside a localize function name.
|
||||
@@ -101,22 +113,22 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
return;
}
// We have a string that is a direct argument into the localize call.
const keyArg = callInfo && callInfo.argIndex === this.keyIndex
var keyArg = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex]
: null;
if (keyArg) {
if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
else if (isObjectLiteral(keyArg)) {
for (let i = 0; i < keyArg.properties.length; i++) {
const property = keyArg.properties[i];
for (var i = 0; i < keyArg.properties.length; i++) {
var property = keyArg.properties[i];
if (isPropertyAssignment(property)) {
const name = property.name.getText();
if (name === 'key') {
const initializer = property.initializer;
var name_1 = property.name.getText();
if (name_1 === 'key') {
var initializer = property.initializer;
if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
break;
}
@@ -124,42 +136,42 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
}
}
}
const messageArg = callInfo.callExpression.arguments[this.messageIndex];
var messageArg = callInfo.callExpression.arguments[this.messageIndex];
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), `Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`));
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return;
}
}
recordKey(keyNode, messageNode) {
const text = keyNode.getText();
};
NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
var text = keyNode.getText();
// We have an empty key
if (text.match(/(['"]) *\1/)) {
if (messageNode) {
this.addFailureAtNode(keyNode, `Key is empty for message: ${messageNode.getText()}`);
this.addFailureAtNode(keyNode, "Key is empty for message: " + messageNode.getText());
}
else {
this.addFailureAtNode(keyNode, `Key is empty.`);
this.addFailureAtNode(keyNode, "Key is empty.");
}
return;
}
let occurrences = this.usedKeys[text];
var occurrences = this.usedKeys[text];
if (!occurrences) {
occurrences = [];
this.usedKeys[text] = occurrences;
}
if (messageNode) {
if (occurrences.some(pair => pair.message ? pair.message.getText() === messageNode.getText() : false)) {
if (occurrences.some(function (pair) { return pair.message ? pair.message.getText() === messageNode.getText() : false; })) {
return;
}
}
occurrences.push({ key: keyNode, message: messageNode });
}
findDescribingParent(node) {
let parent;
};
NoUnexternalizedStringsRuleWalker.prototype.findDescribingParent = function (node) {
var parent;
while ((parent = node.parent)) {
const kind = parent.kind;
var kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) {
const callExpression = parent;
var callExpression = parent;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } };
}
else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
@@ -173,9 +185,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
}
node = parent;
}
return null;
}
}
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
NoUnexternalizedStringsRuleWalker.IDENTIFIER = /^[_a-zA-Z0-9][ .\-_a-zA-Z0-9]*$/;
};
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
return NoUnexternalizedStringsRuleWalker;
}(Lint.RuleWalker));

@@ -40,7 +40,7 @@ function isPropertyAssignment(node: ts.Node): node is ts.PropertyAssignment {

interface KeyMessagePair {
key: ts.StringLiteral;
message: ts.Node | undefined;
message: ts.Node;
}

class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
@@ -50,8 +50,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private static DOUBLE_QUOTE: string = '"';

private signatures: Map<boolean>;
private messageIndex: number | undefined;
private keyIndex: number | undefined;
private messageIndex: number;
private keyIndex: number;
private ignores: Map<boolean>;

private usedKeys: Map<KeyMessagePair[]>;
@@ -63,8 +63,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
this.messageIndex = undefined;
this.keyIndex = undefined;
this.usedKeys = Object.create(null);
const options: any[] = this.getOptions();
const first: UnexternalizedStringsOptions = options && options.length > 0 ? options[0] : null;
let options: any[] = this.getOptions();
let first: UnexternalizedStringsOptions = options && options.length > 0 ? options[0] : null;
if (first) {
if (Array.isArray(first.signatures)) {
first.signatures.forEach((signature: string) => this.signatures[signature] = true);
@@ -81,17 +81,10 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
}
}

private static IDENTIFIER = /^[_a-zA-Z0-9][ .\-_a-zA-Z0-9]*$/;
protected visitSourceFile(node: ts.SourceFile): void {
super.visitSourceFile(node);
Object.keys(this.usedKeys).forEach(key => {
// Keys are quoted.
let identifier = key.substr(1, key.length - 2);
if (!NoUnexternalizedStringsRuleWalker.IDENTIFIER.test(identifier)) {
let occurrence = this.usedKeys[key][0];
this.addFailure(this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `The key ${occurrence.key.getText()} doesn't conform to a valid localize identifier`));
}
const occurrences = this.usedKeys[key];
let occurrences = this.usedKeys[key];
if (occurrences.length > 1) {
occurrences.forEach(occurrence => {
this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`)));
@@ -106,9 +99,9 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
}

private checkStringLiteral(node: ts.StringLiteral): void {
const text = node.getText();
const doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
const info = this.findDescribingParent(node);
let text = node.getText();
let doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
let info = this.findDescribingParent(node);
// Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) {
const fix = [
@@ -122,13 +115,13 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
);
return;
}
const callInfo = info ? info.callInfo : null;
const functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
let callInfo = info ? info.callInfo : null;
let functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) {
return;
}

if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName!])) {
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText();
const fix = [
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`),
@@ -137,25 +130,25 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
return;
}
// We have a single quoted string outside a localize function name.
if (!doubleQuoted && !this.signatures[functionName!]) {
if (!doubleQuoted && !this.signatures[functionName]) {
return;
}
// We have a string that is a direct argument into the localize call.
const keyArg: ts.Expression | null = callInfo && callInfo.argIndex === this.keyIndex
let keyArg: ts.Expression = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex]
: null;
if (keyArg) {
if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} else if (isObjectLiteral(keyArg)) {
for (let i = 0; i < keyArg.properties.length; i++) {
const property = keyArg.properties[i];
let property = keyArg.properties[i];
if (isPropertyAssignment(property)) {
const name = property.name.getText();
let name = property.name.getText();
if (name === 'key') {
const initializer = property.initializer;
let initializer = property.initializer;
if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
}
break;
}
@@ -164,18 +157,18 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
}
}

const messageArg = callInfo!.callExpression.arguments[this.messageIndex!];
const messageArg = callInfo.callExpression.arguments[this.messageIndex];

if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
this.addFailure(this.createFailure(
messageArg.getStart(), messageArg.getWidth(),
`Message argument to '${callInfo!.callExpression.expression.getText()}' must be a string literal.`));
`Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`));
return;
}
}

private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node | undefined) {
const text = keyNode.getText();
private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) {
let text = keyNode.getText();
// We have an empty key
if (text.match(/(['"]) *\1/)) {
if (messageNode) {
@@ -198,12 +191,12 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
occurrences.push({ key: keyNode, message: messageNode });
}

private findDescribingParent(node: ts.Node): { callInfo?: { callExpression: ts.CallExpression, argIndex: number }, isImport?: boolean; } | null {
private findDescribingParent(node: ts.Node): { callInfo?: { callExpression: ts.CallExpression, argIndex: number }, isImport?: boolean; } {
let parent: ts.Node;
while ((parent = node.parent)) {
const kind = parent.kind;
let kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) {
const callExpression = parent as ts.CallExpression;
let callExpression = parent as ts.CallExpression;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(<any>node) } };
} else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
return { isImport: true };
@@ -215,6 +208,5 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
}
node = parent;
}
return null;
}
}
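For orientation, this is the pattern the rule above enforces: double-quoted string literals must flow through one of the configured localize signatures, with a key that matches the IDENTIFIER regex. A minimal sketch in TypeScript — the key names and messages are invented, and 'vs/nls' is the import path the rule's own fix-up suggests:

import * as nls from 'vs/nls';

const bad = "Save All";                            // flagged: double-quoted, outside a localize call
const good = nls.localize('saveAll', "Save All");  // ok: identifier key, double-quoted message
// Object-literal keys are handled by the isObjectLiteral branch above;
// the extra fields are assumed from vs/nls conventions, only 'key' is read by the rule.
const annotated = nls.localize({ key: 'saveAll', comment: ['toolbar button'] }, "Save All");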

@@ -3,43 +3,59 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint");
const fs = require("fs");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
var Lint = require("tslint");
var fs = require("fs");
var Rule = /** @class */ (function (_super) {
__extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new TranslationRemindRuleWalker(sourceFile, this.getOptions()));
}
}
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule;
class TranslationRemindRuleWalker extends Lint.RuleWalker {
constructor(file, opts) {
super(file, opts);
var TranslationRemindRuleWalker = /** @class */ (function (_super) {
__extends(TranslationRemindRuleWalker, _super);
function TranslationRemindRuleWalker(file, opts) {
return _super.call(this, file, opts) || this;
}
visitImportDeclaration(node) {
const declaration = node.moduleSpecifier.getText();
if (declaration !== `'${TranslationRemindRuleWalker.NLS_MODULE}'`) {
TranslationRemindRuleWalker.prototype.visitImportDeclaration = function (node) {
var declaration = node.moduleSpecifier.getText();
if (declaration !== "'" + TranslationRemindRuleWalker.NLS_MODULE + "'") {
return;
}
this.visitImportLikeDeclaration(node);
}
visitImportEqualsDeclaration(node) {
const reference = node.moduleReference.getText();
if (reference !== `require('${TranslationRemindRuleWalker.NLS_MODULE}')`) {
};
TranslationRemindRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
var reference = node.moduleReference.getText();
if (reference !== "require('" + TranslationRemindRuleWalker.NLS_MODULE + "')") {
return;
}
this.visitImportLikeDeclaration(node);
}
visitImportLikeDeclaration(node) {
const currentFile = node.getSourceFile().fileName;
const matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
const matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/);
};
TranslationRemindRuleWalker.prototype.visitImportLikeDeclaration = function (node) {
var currentFile = node.getSourceFile().fileName;
var matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
var matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/);
if (!matchService && !matchPart) {
return;
}
const resource = matchService ? matchService[0] : matchPart[0];
let resourceDefined = false;
let json;
var resource = matchService ? matchService[0] : matchPart[0];
var resourceDefined = false;
var json;
try {
json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
}
@@ -47,16 +63,17 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
return;
}
const workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach((existingResource) => {
var workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach(function (existingResource) {
if (existingResource.name === resource) {
resourceDefined = true;
return;
}
});
if (!resourceDefined) {
this.addFailureAtNode(node, `Please add '${resource}' to ./build/lib/i18n.resources.json file to use translations here.`);
this.addFailureAtNode(node, "Please add '" + resource + "' to ./build/lib/i18n.resources.json file to use translations here.");
}
}
}
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';
};
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';
return TranslationRemindRuleWalker;
}(Lint.RuleWalker));
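In practice this walker fires when a file under vs/workbench/parts/* or vs/workbench/services/* imports vs/nls without a matching entry in ./build/lib/i18n.resources.json. A minimal sketch of what satisfies it — the resource name is invented, and only the name field is read by the rule:

// build/lib/i18n.resources.json (hypothetical excerpt):
// {
//     "workbench": [
//         { "name": "vs/workbench/parts/myFeature" }
//     ]
// }

// vs/workbench/parts/myFeature/myFeature.ts — this import is what triggers the check
import * as nls from 'vs/nls';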

@@ -30,7 +30,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
this.visitImportLikeDeclaration(node);
}

protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
const reference = node.moduleReference.getText();
if (reference !== `require('${TranslationRemindRuleWalker.NLS_MODULE}')`) {
return;
@@ -47,7 +47,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
return;
}

const resource = matchService ? matchService[0] : matchPart![0];
const resource = matchService ? matchService[0] : matchPart[0];
let resourceDefined = false;

let json;
@@ -59,7 +59,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
}
const workbenchResources = JSON.parse(json).workbench;

workbenchResources.forEach((existingResource: any) => {
workbenchResources.forEach(existingResource => {
if (existingResource.name === resource) {
resourceDefined = true;
return;
10
build/lib/typings/OSSREADME.json
Normal file
@@ -0,0 +1,10 @@
// ATTENTION - THIS DIRECTORY CONTAINS THIRD PARTY OPEN SOURCE MATERIALS:

// All OSS in this folder is development time only
[{
"name": "definitelytyped",
"repositoryURL": "https://github.com/DefinitelyTyped/DefinitelyTyped",
"license": "MIT",
"isDev": true
}
]
361
build/lib/typings/Q.d.ts
vendored
Normal file
@@ -0,0 +1,361 @@
// Type definitions for Q
// Project: https://github.com/kriskowal/q
// Definitions by: Barrie Nemetchek <https://github.com/bnemetchek>, Andrew Gaspar <https://github.com/AndrewGaspar/>, John Reilly <https://github.com/johnnyreilly>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

/**
* If value is a Q promise, returns the promise.
* If value is a promise from another library it is coerced into a Q promise (where possible).
*/
declare function Q<T>(promise: Q.IPromise<T>): Q.Promise<T>;
/**
* If value is not a promise, returns a promise that is fulfilled with value.
*/
declare function Q<T>(value: T): Q.Promise<T>;
/**
* Calling with nothing at all creates a void promise
*/
declare function Q(): Q.Promise<void>;

declare namespace Q {
type IWhenable<T> = IPromise<T> | T;
interface IPromise<T> {
then<U>(onFulfill?: (value: T) => IWhenable<U>, onReject?: (error: any) => IWhenable<U>): IPromise<U>;
}

interface Deferred<T> {
promise: Promise<T>;
resolve(value?: IWhenable<T>): void;
reject(reason: any): void;
notify(value: any): void;
makeNodeResolver(): (reason: any, value: T) => void;
}

interface Promise<T> {
/**
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.

* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
*/
fin(finallyCallback: () => any): Promise<T>;
/**
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.

* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
*/
finally(finallyCallback: () => any): Promise<T>;

/**
* The then method from the Promises/A+ specification, with an additional progress handler.
*/
then<U>(onFulfill?: (value: T) => IWhenable<U>, onReject?: (error: any) => IWhenable<U>, onProgress?: Function): Promise<U>;

/**
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
*
* This is especially useful in conjunction with all
*/
spread<U>(onFulfill: (...args: any[]) => IWhenable<U>, onReject?: (reason: any) => IWhenable<U>): Promise<U>;

fail<U>(onRejected: (reason: any) => IWhenable<U>): Promise<U>;

/**
* A sugar method, equivalent to promise.then(undefined, onRejected).
*/
catch<U>(onRejected: (reason: any) => IWhenable<U>): Promise<U>;

/**
* A sugar method, equivalent to promise.then(undefined, undefined, onProgress).
*/
progress(onProgress: (progress: any) => any): Promise<T>;

/**
* Much like then, but with different behavior around unhandled rejection. If there is an unhandled rejection, either because promise is rejected and no onRejected callback was provided, or because onFulfilled or onRejected threw an error or returned a rejected promise, the resulting rejection reason is thrown as an exception in a future turn of the event loop.
*
* This method should be used to terminate chains of promises that will not be passed elsewhere. Since exceptions thrown in then callbacks are consumed and transformed into rejections, exceptions at the end of the chain are easy to accidentally, silently ignore. By arranging for the exception to be thrown in a future turn of the event loop, so that it won't be caught, it causes an onerror event on the browser window, or an uncaughtException event on Node.js's process object.
*
* Exceptions thrown by done will have long stack traces, if Q.longStackSupport is set to true. If Q.onerror is set, exceptions will be delivered there instead of thrown in a future turn.
*
* The Golden Rule of done vs. then usage is: either return your promise to someone else, or if the chain ends with you, call done to terminate it.
*/
done(onFulfilled?: (value: T) => any, onRejected?: (reason: any) => any, onProgress?: (progress: any) => any): void;

/**
* If callback is a function, assumes it's a Node.js-style callback, and calls it as either callback(rejectionReason) when/if promise becomes rejected, or as callback(null, fulfillmentValue) when/if promise becomes fulfilled. If callback is not a function, simply returns promise.
*/
nodeify(callback: (reason: any, value: any) => void): Promise<T>;

/**
* Returns a promise to get the named property of an object. Essentially equivalent to
*
* promise.then(function (o) {
* return o[propertyName];
* });
*/
get<U>(propertyName: String): Promise<U>;
set<U>(propertyName: String, value: any): Promise<U>;
delete<U>(propertyName: String): Promise<U>;
/**
* Returns a promise for the result of calling the named method of an object with the given array of arguments. The object itself is this in the function, just like a synchronous method call. Essentially equivalent to
*
* promise.then(function (o) {
* return o[methodName].apply(o, args);
* });
*/
post<U>(methodName: String, args: any[]): Promise<U>;
/**
* Returns a promise for the result of calling the named method of an object with the given variadic arguments. The object itself is this in the function, just like a synchronous method call.
*/
invoke<U>(methodName: String, ...args: any[]): Promise<U>;
fapply<U>(args: any[]): Promise<U>;
fcall<U>(...args: any[]): Promise<U>;

/**
* Returns a promise for an array of the property names of an object. Essentially equivalent to
*
* promise.then(function (o) {
* return Object.keys(o);
* });
*/
keys(): Promise<string[]>;

/**
* A sugar method, equivalent to promise.then(function () { return value; }).
*/
thenResolve<U>(value: U): Promise<U>;
/**
* A sugar method, equivalent to promise.then(function () { throw reason; }).
*/
thenReject(reason: any): Promise<T>;

/**
* Attaches a handler that will observe the value of the promise when it becomes fulfilled, returning a promise for that same value, perhaps deferred but not replaced by the promise returned by the onFulfilled handler.
*/
tap(onFulfilled: (value: T) => any): Promise<T>;

timeout(ms: number, message?: string): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
delay(ms: number): Promise<T>;

/**
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
*/
isFulfilled(): boolean;
/**
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
*/
isRejected(): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
isPending(): boolean;

valueOf(): any;

/**
* Returns a "state snapshot" object, which will be in one of three forms:
*
* - { state: "pending" }
* - { state: "fulfilled", value: <fulfllment value> }
|
||||
* - { state: "rejected", reason: <rejection reason> }
|
||||
*/
|
||||
inspect(): PromiseState<T>;
|
||||
}
|
||||
|
||||
interface PromiseState<T> {
|
||||
/**
|
||||
* "fulfilled", "rejected", "pending"
|
||||
*/
|
||||
state: string;
|
||||
value?: T;
|
||||
reason?: any;
|
||||
}
|
||||
|
||||
// If no value provided, returned promise will be of void type
|
||||
export function when(): Promise<void>;
|
||||
|
||||
// if no fulfill, reject, or progress provided, returned promise will be of same type
|
||||
export function when<T>(value: IWhenable<T>): Promise<T>;
|
||||
|
||||
// If a non-promise value is provided, it will not reject or progress
|
||||
export function when<T, U>(value: IWhenable<T>, onFulfilled: (val: T) => IWhenable<U>, onRejected?: (reason: any) => IWhenable<U>, onProgress?: (progress: any) => any): Promise<U>;
|
||||
|
||||
/**
|
||||
* Currently "impossible" (and I use the term loosely) to implement due to TypeScript limitations as it is now.
|
||||
* See: https://github.com/Microsoft/TypeScript/issues/1784 for discussion on it.
|
||||
*/
|
||||
// export function try(method: Function, ...args: any[]): Promise<any>;
|
||||
|
||||
export function fbind<T>(method: (...args: any[]) => IWhenable<T>, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
|
||||
export function fcall<T>(method: (...args: any[]) => T, ...args: any[]): Promise<T>;
|
||||
|
||||
export function send<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function invoke<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function mcall<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
|
||||
export function denodeify<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
export function nbind<T>(nodeFunction: Function, thisArg: any, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
export function nfbind<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
|
||||
export function nfcall<T>(nodeFunction: Function, ...args: any[]): Promise<T>;
|
||||
export function nfapply<T>(nodeFunction: Function, args: any[]): Promise<T>;
|
||||
|
||||
export function ninvoke<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function npost<T>(nodeModule: any, functionName: string, args: any[]): Promise<T>;
|
||||
export function nsend<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
export function nmcall<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
|
||||
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C, D, E, F>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>, IWhenable<E>, IWhenable<F>]>): Promise<[A, B, C, D, E, F]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C, D, E>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>, IWhenable<E>]>): Promise<[A, B, C, D, E]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C, D>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>]>): Promise<[A, B, C, D]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B, C>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>]>): Promise<[A, B, C]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<A, B>(promises: IWhenable<[IWhenable<A>, IWhenable<B>]>): Promise<[A, B]>;
|
||||
/**
|
||||
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
|
||||
*/
|
||||
export function all<T>(promises: IWhenable<IWhenable<T>[]>): Promise<T[]>;
|
||||

/**
* Returns a promise for the first of an array of promises to become settled.
*/
export function race<T>(promises: IWhenable<T>[]): Promise<T>;

/**
* Returns a promise that is fulfilled with an array of promise state snapshots, but only after all the original promises have settled, i.e. become either fulfilled or rejected.
*/
export function allSettled<T>(promises: IWhenable<IWhenable<T>[]>): Promise<PromiseState<T>[]>;

export function allResolved<T>(promises: IWhenable<IWhenable<T>[]>): Promise<Promise<T>[]>;

/**
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
* This is especially useful in conjunction with all.
*/
export function spread<T, U>(promises: IWhenable<T>[], onFulfilled: (...args: T[]) => IWhenable<U>, onRejected?: (reason: any) => IWhenable<U>): Promise<U>;

/**
* Returns a promise that will have the same result as promise, except that if promise is not fulfilled or rejected before ms milliseconds, the returned promise will be rejected with an Error with the given message. If message is not supplied, the message will be "Timed out after " + ms + " ms".
*/
export function timeout<T>(promise: Promise<T>, ms: number, message?: string): Promise<T>;

/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
export function delay<T>(promise: Promise<T>, ms: number): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
export function delay<T>(value: T, ms: number): Promise<T>;
/**
* Returns a promise that will be fulfilled with undefined after at least ms milliseconds have passed.
*/
export function delay(ms: number): Promise <void>;
/**
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
*/
export function isFulfilled(promise: Promise<any>): boolean;
/**
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
*/
export function isRejected(promise: Promise<any>): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
export function isPending(promise: Promise<any>): boolean;

/**
* Returns a "deferred" object with a:
* promise property
* resolve(value) method
* reject(reason) method
* notify(value) method
* makeNodeResolver() method
*/
export function defer<T>(): Deferred<T>;

/**
* Returns a promise that is rejected with reason.
*/
export function reject<T>(reason?: any): Promise<T>;

export function Promise<T>(resolver: (resolve: (val: IWhenable<T>) => void , reject: (reason: any) => void , notify: (progress: any) => void ) => void ): Promise<T>;

/**
* Creates a new version of func that accepts any combination of promise and non-promise values, converting them to their fulfillment values before calling the original func. The returned version also always returns a promise: if func does a return or throw, then Q.promised(func) will return fulfilled or rejected promise, respectively.
*
* This can be useful for creating functions that accept either promises or non-promise values, and for ensuring that the function always returns a promise even in the face of unintentional thrown exceptions.
*/
export function promised<T>(callback: (...args: any[]) => T): (...args: any[]) => Promise<T>;

/**
* Returns whether the given value is a Q promise.
*/
export function isPromise(object: any): boolean;
/**
* Returns whether the given value is a promise (i.e. it's an object with a then function).
*/
export function isPromiseAlike(object: any): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
export function isPending(object: any): boolean;
/**
* If an object is not a promise, it is as "near" as possible.
* If a promise is rejected, it is as "near" as possible too.
* If it’s a fulfilled promise, the fulfillment value is nearer.
* If it’s a deferred promise and the deferred has been resolved, the
* resolution is "nearer".
*/
export function nearer<T>(promise: Promise<T>): T;

/**
* This is an experimental tool for converting a generator function into a deferred function. This has the potential of reducing nested callbacks in engines that support yield.
*/
export function async<T>(generatorFunction: any): (...args: any[]) => Promise<T>;
export function nextTick(callback: Function): void;

/**
* A settable property that will intercept any uncaught errors that would otherwise be thrown in the next tick of the event loop, usually as a result of done. Can be useful for getting the full stack trace of an error in browsers, which is not usually possible with window.onerror.
*/
export var onerror: (reason: any) => void;
/**
* A settable property that lets you turn on long stack trace support. If turned on, "stack jumps" will be tracked across asynchronous promise operations, so that if an uncaught error is thrown by done or a rejection reason's stack property is inspected in a rejection callback, a long stack trace is produced.
*/
export var longStackSupport: boolean;

/**
* Calling resolve with a pending promise causes promise to wait on the passed promise, becoming fulfilled with its fulfillment value or rejected with its rejection reason (or staying pending forever, if the passed promise does).
* Calling resolve with a rejected promise causes promise to be rejected with the passed promise's rejection reason.
* Calling resolve with a fulfilled promise causes promise to be fulfilled with the passed promise's fulfillment value.
* Calling resolve with a non-promise value causes promise to be fulfilled with that value.
*/
export function resolve<T>(object: IWhenable<T>): Promise<T>;

/**
* Resets the global "Q" variable to the value it has before Q was loaded.
* This will either be undefined if there was no version or the version of Q which was already loaded before.
* @returns { The last version of Q. }
*/
export function noConflict(): typeof Q;
}

declare module "q" {
export = Q;
}
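As a quick orientation to the typings above, a minimal usage sketch (values invented; the require-style import is needed because the module uses export =):

import Q = require('q');

const d = Q.defer<number>();
setTimeout(() => d.resolve(41), 10);

// all + spread over the settled values, then done() to terminate the chain,
// per the Golden Rule in the doc comments above.
Q.all<number, number>([d.promise, Q(1)])
    .spread((a: number, b: number) => a + b)
    .then(sum => console.log(sum)) // 42
    .done();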
@@ -1,16 +0,0 @@
{
"registrations": [
{
"component": {
"type": "git",
"git": {
"name": "definitelytyped",
"repositoryUrl": "https://github.com/DefinitelyTyped/DefinitelyTyped",
"commitHash": "69e3ac6bec3008271f76bbfa7cf69aa9198c4ff0"
}
},
"license": "MIT"
}
],
"version": 1
}
121
build/lib/typings/chalk.d.ts
vendored
Normal file
@@ -0,0 +1,121 @@
// Type definitions for chalk v0.4.0
// Project: https://github.com/sindresorhus/chalk
// Definitions by: Diullei Gomes <https://github.com/Diullei>, Bart van der Schoor <https://github.com/Bartvds>, Nico Jansen <https://github.com/nicojs>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

declare namespace Chalk {

export var enabled: boolean;
export var supportsColor: boolean;
export var styles: ChalkStyleMap;

export function stripColor(value: string): any;
export function hasColor(str: string): boolean;

export interface ChalkChain extends ChalkStyle {
(...text: string[]): string;
}

export interface ChalkStyleElement {
open: string;
close: string;
}

// General
export var reset: ChalkChain;
export var bold: ChalkChain;
export var italic: ChalkChain;
export var underline: ChalkChain;
export var inverse: ChalkChain;
export var strikethrough: ChalkChain;

// Text colors
export var black: ChalkChain;
export var red: ChalkChain;
export var green: ChalkChain;
export var yellow: ChalkChain;
export var blue: ChalkChain;
export var magenta: ChalkChain;
export var cyan: ChalkChain;
export var white: ChalkChain;
export var gray: ChalkChain;
export var grey: ChalkChain;

// Background colors
export var bgBlack: ChalkChain;
export var bgRed: ChalkChain;
export var bgGreen: ChalkChain;
export var bgYellow: ChalkChain;
export var bgBlue: ChalkChain;
export var bgMagenta: ChalkChain;
export var bgCyan: ChalkChain;
export var bgWhite: ChalkChain;


export interface ChalkStyle {
// General
reset: ChalkChain;
bold: ChalkChain;
italic: ChalkChain;
underline: ChalkChain;
inverse: ChalkChain;
strikethrough: ChalkChain;

// Text colors
black: ChalkChain;
red: ChalkChain;
green: ChalkChain;
yellow: ChalkChain;
blue: ChalkChain;
magenta: ChalkChain;
cyan: ChalkChain;
white: ChalkChain;
gray: ChalkChain;
grey: ChalkChain;

// Background colors
bgBlack: ChalkChain;
bgRed: ChalkChain;
bgGreen: ChalkChain;
bgYellow: ChalkChain;
bgBlue: ChalkChain;
bgMagenta: ChalkChain;
bgCyan: ChalkChain;
bgWhite: ChalkChain;
}

export interface ChalkStyleMap {
// General
reset: ChalkStyleElement;
bold: ChalkStyleElement;
italic: ChalkStyleElement;
underline: ChalkStyleElement;
inverse: ChalkStyleElement;
strikethrough: ChalkStyleElement;

// Text colors
black: ChalkStyleElement;
red: ChalkStyleElement;
green: ChalkStyleElement;
yellow: ChalkStyleElement;
blue: ChalkStyleElement;
magenta: ChalkStyleElement;
cyan: ChalkStyleElement;
white: ChalkStyleElement;
gray: ChalkStyleElement;

// Background colors
bgBlack: ChalkStyleElement;
bgRed: ChalkStyleElement;
bgGreen: ChalkStyleElement;
bgYellow: ChalkStyleElement;
bgBlue: ChalkStyleElement;
bgMagenta: ChalkStyleElement;
bgCyan: ChalkStyleElement;
bgWhite: ChalkStyleElement;
}
}

declare module "chalk" {
export = Chalk;
}
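A minimal usage sketch for these typings (strings invented; require-style import because of export =):

import chalk = require('chalk');

console.log(chalk.red('error'));                      // a ChalkChain is callable...
console.log(chalk.bold.underline('notice'));          // ...and chains through ChalkStyle
console.log(chalk.stripColor(chalk.green('plain')));  // back to 'plain'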
18
build/lib/typings/debounce.d.ts
vendored
Normal file
@@ -0,0 +1,18 @@
// Type definitions for compose-function
// Project: https://github.com/component/debounce
// Definitions by: Denis Sokolov <https://github.com/denis-sokolov>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

declare module "debounce" {
// Overload on boolean constants would allow us to narrow further,
// but it is not implemented for TypeScript yet
function f<A extends Function>(f: A, interval?: number, immediate?: boolean): A

/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace f {}

export = f;
}
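Usage sketch (names invented): because the generic is A extends Function, the wrapper keeps the original signature.

import debounce = require('debounce');

const save = (doc: string) => console.log('saving', doc);
const debouncedSave = debounce(save, 100);  // same signature as save
debouncedSave('a.txt');
debouncedSave('a.txt');                     // only the trailing call runs, ~100ms later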
19
build/lib/typings/event-stream.d.ts
vendored
@@ -1,11 +1,12 @@
declare module "event-stream" {
import { Stream } from 'stream';
import { ThroughStream as _ThroughStream } from 'through';
import { ThroughStream as _ThroughStream} from 'through';
import { MapStream } from 'map-stream';
import * as File from 'vinyl';

export interface ThroughStream extends _ThroughStream {
queue(data: File | null): any;
push(data: File | null): any;
queue(data: File | null);
push(data: File | null);
paused: boolean;
}

@@ -14,14 +15,14 @@ declare module "event-stream" {
function concat(...stream: Stream[]): ThroughStream;
function duplex(istream: Stream, ostream: Stream): ThroughStream;

function through(write?: (this: ThroughStream, data: any) => void, end?: (this: ThroughStream) => void,
opts?: { autoDestroy: boolean; }): ThroughStream;
function through(write?: (data: any) => void, end?: () => void,
opts?: {autoDestroy: boolean; }): ThroughStream;

function readArray<T>(array: T[]): ThroughStream;
function writeArray<T>(cb: (err: Error, array: T[]) => void): ThroughStream;
function writeArray<T>(cb: (err:Error, array:T[]) => void): ThroughStream;

function mapSync<I, O>(cb: (data: I) => O): ThroughStream;
function map<I, O>(cb: (data: I, cb: (err?: Error, data?: O) => void) => O): ThroughStream;
function mapSync<I,O>(cb: (data:I) => O): ThroughStream;
function map<I,O>(cb: (data:I, cb:(err?:Error, data?: O)=>void) => O): ThroughStream;

function readable(asyncFunction: (this: ThroughStream, ...args: any[]) => any): any;
function readable(asyncFunction: Function): MapStream;
}
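A minimal sketch of this API (array values invented): readArray and writeArray bracket the pipeline, mapSync transforms each item.

import * as es from 'event-stream';

es.readArray([1, 2, 3])
    .pipe(es.mapSync((n: number) => n * 2))
    .pipe(es.writeArray((err: Error, out: number[]) => console.log(out))); // [2, 4, 6]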
3
build/lib/typings/github-releases.d.ts
vendored
@@ -1,3 +0,0 @@
declare module 'github-releases' {

}
43
build/lib/typings/gulp-concat.d.ts
vendored
Normal file
@@ -0,0 +1,43 @@
// Type definitions for gulp-concat
// Project: http://github.com/wearefractal/gulp-concat
// Definitions by: Keita Kagurazaka <https://github.com/k-kagurazaka>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

declare module "gulp-concat" {

interface IOptions {
newLine: string;
}

interface IFsStats {
dev?: number;
ino?: number;
mode?: number;
nlink?: number;
uid?: number;
gid?: number;
rdev?: number;
size?: number;
blksize?: number;
blocks?: number;
atime?: Date;
mtime?: Date;
ctime?: Date;
}

interface IVinylOptions {
cwd?: string;
base?: string;
path?: string;
stat?: IFsStats;
contents?: NodeJS.ReadableStream | Buffer;
}

interface IConcat {
(filename: string, options?: IOptions): NodeJS.ReadWriteStream;
(options: IVinylOptions): NodeJS.ReadWriteStream;
}

var _tmp: IConcat;
export = _tmp;
}
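Usage sketch (paths invented; gulp itself is assumed available in the build):

import concat = require('gulp-concat');
const gulp = require('gulp');

gulp.src('src/**/*.js')
    .pipe(concat('bundle.js', { newLine: ';' })) // first IConcat overload: target filename + options
    .pipe(gulp.dest('out'));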
29
build/lib/typings/gulp-filter.d.ts
vendored
Normal file
@@ -0,0 +1,29 @@
// Type definitions for gulp-filter v3.0.1
// Project: https://github.com/sindresorhus/gulp-filter
// Definitions by: Tanguy Krotoff <https://github.com/tkrotoff>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

declare module 'gulp-filter' {
import File = require('vinyl');
import * as Minimatch from 'minimatch';

namespace filter {
interface FileFunction {
(file: File): boolean;
}

interface Options extends Minimatch.IOptions {
restore?: boolean;
passthrough?: boolean;
}

// A transform stream with a .restore object
interface Filter extends NodeJS.ReadWriteStream {
restore: NodeJS.ReadWriteStream
}
}

function filter(pattern: string | string[] | filter.FileFunction, options?: filter.Options): filter.Filter;

export = filter;
}
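Usage sketch (globs invented): restore: true is what makes the Filter.restore stream meaningful.

import filter = require('gulp-filter');
const gulp = require('gulp');

const minOnly = filter('**/*.min.js', { restore: true });
gulp.src('src/**/*.js')
    .pipe(minOnly)          // only minified files continue here
    .pipe(minOnly.restore)  // the rest rejoin the stream
    .pipe(gulp.dest('out'));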
29
build/lib/typings/gulp-rename.d.ts
vendored
Normal file
@@ -0,0 +1,29 @@
// Type definitions for gulp-rename
// Project: https://github.com/hparra/gulp-rename
// Definitions by: Asana <https://asana.com>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

declare module "gulp-rename" {
interface ParsedPath {
dirname?: string;
basename?: string;
extname?: string;
}

interface Options extends ParsedPath {
prefix?: string;
suffix?: string;
}

function rename(name: string): NodeJS.ReadWriteStream;
function rename(callback: (path: ParsedPath) => any): NodeJS.ReadWriteStream;
function rename(opts: Options): NodeJS.ReadWriteStream;

/**
* This is required as per:
* https://github.com/Microsoft/TypeScript/issues/5073
*/
namespace rename {}

export = rename;
}
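Usage sketch (paths invented), exercising the Options overload:

import rename = require('gulp-rename');
const gulp = require('gulp');

gulp.src('src/app.js')
    .pipe(rename({ suffix: '-min' })) // -> out/app-min.js
    .pipe(gulp.dest('out'));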
27
build/lib/typings/gulp-sourcemaps.d.ts
vendored
Normal file
@@ -0,0 +1,27 @@
// Type definitions for gulp-sourcemaps
// Project: https://github.com/floridoo/gulp-sourcemaps
// Definitions by: Asana <https://asana.com>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped

declare module "gulp-sourcemaps" {
interface InitOptions {
loadMaps?: boolean;
debug?: boolean;
}

interface WriteMapper {
(file: string): string;
}

interface WriteOptions {
addComment?: boolean;
includeContent?: boolean;
sourceRoot?: string | WriteMapper;
sourceMappingURLPrefix?: string | WriteMapper;
sourceMappingURL?: (f:{relative:string})=>string;
}

export function init(opts?: InitOptions): NodeJS.ReadWriteStream;
export function write(path?: string, opts?: WriteOptions): NodeJS.ReadWriteStream;
export function write(opts?: WriteOptions): NodeJS.ReadWriteStream;
}
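Usage sketch (paths invented): init brackets the transforming steps, write emits the map files.

import * as sourcemaps from 'gulp-sourcemaps';
const gulp = require('gulp');

gulp.src('src/**/*.js')
    .pipe(sourcemaps.init({ loadMaps: true }))               // pick up pre-existing maps
    .pipe(sourcemaps.write('.', { includeContent: false }))  // write .map files next to output
    .pipe(gulp.dest('out'));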
Some files were not shown because too many files have changed in this diff.