Compare commits


18 Commits
1.5.1 ... 1.2.3

Author SHA1 Message Date
Karl Burtram
8439cde610 Bump SQL Tools to 1.5.0-alpha.52 2018-10-31 20:38:22 -07:00
Anthony Dresser
2080c525a7 Auto Scale Axis (#3070)
* fix input to chart that was causes scales to not auto size

* formatting

* formatting

* added comments
2018-10-31 20:33:09 -07:00
Aditya Bist
db2d380b6c Agent/edit job logic (#3023)
* lumped stepdata with jobdata in job dialog

* fix bug with empty steps

* added clumped and update steps and schedules from job dialog

* edit data sends one call instead of multiple

* cleaned code
2018-10-31 20:32:59 -07:00
Alan Ren
a06f80bdb1 fix for issue 3065 (#3067)
* fix for 3065

* remove the parameter, no need to save connection
2018-10-31 20:32:51 -07:00
Karl Burtram
1f76c85b1b Remove SQL Import dashboard tab (#3064) 2018-10-31 20:32:37 -07:00
Karl Burtram
cffc18d5ea Revert "Revert "Add a command line interface for connecting to a SQL Server (#3047)""
This reverts commit a747b6a500.
2018-10-31 20:32:17 -07:00
Karl Burtram
a747b6a500 Revert "Add a command line interface for connecting to a SQL Server (#3047)"
This reverts commit 7f66087d8c.
2018-10-31 14:39:46 -07:00
Aditya Bist
711b7bf622 fixed null ref (#3061) 2018-10-31 14:25:39 -07:00
Alan Ren
9b0757de9c a few ux improvements (#3057)
* style update

* checkbox styler

* casing update
2018-10-31 14:25:26 -07:00
Alan Ren
b9a0744a83 fix for issue 2719 (#3060) 2018-10-31 14:25:14 -07:00
Karl Burtram
c69915ca58 Bump SQL Tools to 1.5.0-alpha.51 2018-10-31 13:17:20 -07:00
Karl Burtram
a16835918a Fix build break in previous Query Plan commit 2018-10-31 13:04:32 -07:00
David Shiflet
f9e27d7112 Add a command line interface for connecting to a SQL Server (#3047)
* Add switches for server, database, user, integrated auth

* Refactor into new commandline service

* Open query editor when passed server on command line

* Add tests
2018-10-31 10:14:36 -07:00
Ryan
c072ba9c5c Add query plan theme support (#2991) (#3031)
Add monaco-editor and monaco-editor-hover to output otherwise backgrounds collide.
2018-10-31 10:14:26 -07:00
Alan Ren
807fb2ed8a fix missing footer for backup dialog (#3056)
@MattIrv  Thanks for helping out
2018-10-31 09:46:14 -07:00
Ruturaj Gujar
daf347b728 Fixed some typos and grammatical errors (#3027) 2018-10-31 09:46:06 -07:00
Karl Burtram
e76222db7a Change 'None' to 'Do not save' in Connection Dialog (#3051) 2018-10-30 10:46:24 -07:00
Karl Burtram
f676090901 Bump Azure Data Studio to 1.2.3 2018-10-30 10:28:29 -07:00
12563 changed files with 321305 additions and 280035 deletions


@@ -1,4 +1,4 @@
-# EditorConfig is awesome: https://EditorConfig.org
+# EditorConfig is awesome: http://EditorConfig.org
# top-most EditorConfig file
root = true
@@ -6,6 +6,7 @@ root = true
# Tab indentation
[*]
indent_style = tab
+indent_size = 4
trim_trailing_whitespace = true
# The indent size used in the `package.json` file cannot be changed

19
.eslintrc Normal file

@@ -0,0 +1,19 @@
{
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}


@@ -1,20 +0,0 @@
{
"root": true,
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}


@@ -1,10 +1,6 @@
---
name: Bug report
about: Create a report to help us improve
-title: ''
-labels: ''
-assignees: ''
---
<!-- Please search existing issues to avoid creating duplicates. -->


@@ -1,20 +0,0 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: feature request
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution or feature you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.


@@ -1,49 +0,0 @@
{
perform: true,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [],
autoAssignees: {
accessibility: [],
acquisition: [],
agent: [],
azure: [],
backup: [],
bcdr: [],
'chart viewer': [],
connection: [],
dacfx: [],
dashboard: [],
'data explorer': [],
documentation: [],
'edit data': [],
export: [],
extensibility: [],
extensionManager: [],
globalization: [],
grid: [],
import: [],
insights: [],
intellisense: [],
localization: [],
'managed instance': [],
notebooks: [],
'object explorer': [],
performance: [],
profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
}
}

12
.github/commands.yml vendored

@@ -1,12 +0,0 @@
{
perform: false,
commands: [
{
type: 'label',
name: 'duplicate',
allowTriggerByBot: true,
action: 'close',
comment: "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
}
]
}

5
.github/copycat.yml vendored

@@ -1,5 +0,0 @@
{
perform: true,
target_owner: 'anthonydresser',
target_repo: 'testissues'
}

6
.github/locker.yml vendored

@@ -1,6 +0,0 @@
{
daysAfterClose: 45,
daysSinceLastUpdate: 3,
ignoredLabels: [],
perform: true
}


@@ -1,6 +0,0 @@
{
daysUntilClose: 7,
needsMoreInfoLabel: 'needs more info',
perform: true,
closeComment: "This issue has been closed automatically because it needs more information and has not had recent activity in the last 7 days. If you have more info to help resolve the issue, leave a comment"
}


@@ -1,6 +0,0 @@
{
newReleaseLabel: 'new-release',
newReleaseColor: '006b75',
daysAfterRelease: 5,
perform: true
}


@@ -1,5 +0,0 @@
{
perform: true,
whenCreatedByTeam: true,
comment: "Thanks for submitting this issue. Please also check if it is already covered by an existing one, like:\n${potentialDuplicates}"
}

3
.gitignore vendored

@@ -3,7 +3,6 @@ npm-debug.log
Thumbs.db
node_modules/
.build/
-extensions/**/dist/
out/
out-build/
out-editor/
@@ -18,4 +17,4 @@ build/node_modules
coverage/
test_data/
test-results/
yarn-error.log

2
.nvmrc

@@ -1 +1 @@
-8
+8.9.2

58
.travis.yml Normal file

@@ -0,0 +1,58 @@
sudo: false
language: cpp
os:
- linux
- osx
cache:
directories:
- $HOME/.cache/yarn
notifications:
email: false
webhooks:
- http://vscode-probot.westus.cloudapp.azure.com:3450/travis/notifications
- http://vscode-test-probot.westus.cloudapp.azure.com:3450/travis/notifications
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.9
- g++-4.9
- gcc-4.9-multilib
- g++-4.9-multilib
- zip
- libgtk2.0-0
- libx11-dev
- libxkbfile-dev
- libsecret-1-dev
before_install:
- git submodule update --init --recursive
- nvm install 8.9.1
- nvm use 8.9.1
- npm i -g yarn
# - npm config set python `which python`
- if [ $TRAVIS_OS_NAME == "linux" ]; then
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
sh -e /etc/init.d/xvfb start;
sleep 3;
fi
# Make npm logs less verbose
# - npm config set depth 0
# - npm config set loglevel warn
install:
- yarn
script:
- node_modules/.bin/gulp electron --silent
- node_modules/.bin/gulp compile --silent --max_old_space_size=4096
- node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi
after_success:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi


@@ -1,23 +0,0 @@
{
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"licenseDetail"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"licenseDetail": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
}
}
}
}


@@ -1,142 +0,0 @@
{
"type": "object",
"properties": {
"registrations": {
"type": "array",
"items": {
"type": "object",
"properties": {
"component": {
"oneOf": [
{
"type": "object",
"required": [
"type",
"git"
],
"properties": {
"type": {
"type": "string",
"enum": [
"git"
]
},
"git": {
"type": "object",
"required": [
"name",
"repositoryUrl",
"commitHash"
],
"properties": {
"name": {
"type": "string"
},
"repositoryUrl": {
"type": "string"
},
"commitHash": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"npm"
],
"properties": {
"type": {
"type": "string",
"enum": [
"npm"
]
},
"npm": {
"type": "object",
"required": [
"name",
"version"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"other"
],
"properties": {
"type": {
"type": "string",
"enum": [
"other"
]
},
"other": {
"type": "object",
"required": [
"name",
"downloadUrl",
"version"
],
"properties": {
"name": {
"type": "string"
},
"downloadUrl": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
}
]
},
"repositoryUrl": {
"type": "string",
"description": "The git url of the component"
},
"version": {
"type": "string",
"description": "The version of the component"
},
"license": {
"type": "string",
"description": "The name of the license"
},
"developmentDependency": {
"type": "boolean",
"description": "This component is inlined in the vscode repo and **is not shipped**."
},
"isOnlyProductionDependency": {
"type": "boolean",
"description": "This component is shipped and **is not inlined in the vscode repo**."
},
"licenseDetail": {
"type": "array",
"items": {
"type": "string"
},
"description": "The license text"
}
}
}
}
}
}


@@ -2,7 +2,7 @@
// See https://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
-"ms-vscode.vscode-typescript-tslint-plugin",
+"eg2.tslint",
"dbaeumer.vscode-eslint",
"msjsdiag.debugger-for-chrome"
]

144
.vscode/launch.json vendored

@@ -9,12 +9,14 @@
"stopOnEntry": true,
"args": [
"hygiene"
-]
+],
+"cwd": "${workspaceFolder}"
},
{
"type": "node",
"request": "attach",
"name": "Attach to Extension Host",
+"protocol": "inspector",
"port": 5870,
"restart": true,
"outFiles": [
@@ -22,15 +24,19 @@
]
},
{
-"type": "chrome",
+"type": "node",
"request": "attach",
"name": "Attach to Shared Process",
-"port": 9222,
+"protocol": "inspector",
-"urlFilter": "*"
+"port": 5871,
+"outFiles": [
+"${workspaceFolder}/out/**/*.js"
+]
},
{
"type": "node",
"request": "attach",
+"protocol": "inspector",
"name": "Attach to Search Process",
"port": 5876,
"outFiles": [
@@ -41,6 +47,7 @@
"type": "node",
"request": "attach",
"name": "Attach to CLI Process",
+"protocol": "inspector",
"port": 5874,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -50,6 +57,7 @@
"type": "node",
"request": "attach",
"name": "Attach to Main Process",
+"protocol": "inspector",
"port": 5875,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -65,40 +73,6 @@
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio",
-"windows": {
-"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
-"timeout": 20000
-},
-"osx": {
-"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
-},
-"linux": {
-"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
-},
-"breakOnLoad": false,
-"urlFilter": "*workbench.html*",
-"runtimeArgs": [
-"--inspect=5875",
-"--no-cached-data"
-],
-"webRoot": "${workspaceFolder}"
-},
-{
-"type": "node",
-"request": "launch",
-"name": "Launch ADS (Main Process)",
-"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
-"runtimeArgs": [
-"--no-cached-data"
-],
-"outFiles": [
-"${workspaceFolder}/out/**/*.js"
-]
-},
-{
-"type": "chrome",
-"request": "launch",
-"name": "Launch azuredatastudio with new notebook command",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
},
@@ -110,8 +84,7 @@
},
"urlFilter": "*index.html*",
"runtimeArgs": [
-"--inspect=5875",
+"--inspect=5875"
-"--command=notebook.command.new"
],
"skipFiles": [
"**/winjs*.js"
@@ -119,6 +92,34 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
+{
+"type": "node",
+"request": "launch",
+"name": "Unit Tests",
+"protocol": "inspector",
+"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
+"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
+"windows": {
+"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
+},
+"linux": {
+"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
+},
+"stopOnEntry": false,
+"outputCapture": "std",
+"args": [
+"--delay",
+"--timeout",
+"2000"
+],
+"cwd": "${workspaceFolder}",
+"env": {
+"ELECTRON_RUN_AS_NODE": "true"
+},
+"outFiles": [
+"${workspaceFolder}/out/**/*.js"
+]
+},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
@@ -127,70 +128,9 @@
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
]
-},
+}
-{
-"type": "node",
-"request": "launch",
-"name": "Launch Smoke Test",
-"program": "${workspaceFolder}/test/smoke/test/index.js",
-"cwd": "${workspaceFolder}/test/smoke",
-"env": {
-"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
-}
-},
-{
-"type": "node",
-"request": "launch",
-"name": "Run Unit Tests",
-"program": "${workspaceFolder}/test/electron/index.js",
-"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
-"windows": {
-"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
-},
-"linux": {
-"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
-},
-"outputCapture": "std",
-"args": [
-"--remote-debugging-port=9222"
-],
-"cwd": "${workspaceFolder}",
-"outFiles": [
-"${workspaceFolder}/out/**/*.js"
-]
-},
-{
-"type": "chrome",
-"request": "launch",
-"name": "Run Extension Unit Tests",
-"windows": {
-"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.bat"
-},
-"osx": {
-"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
-},
-"linux": {
-"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
-},
-"webRoot": "${workspaceFolder}",
-"timeout": 45000
-},
],
"compounds": [
-{
-"name": "Debug Unit Tests",
-"configurations": [
-"Attach to azuredatastudio",
-"Run Unit Tests"
-]
-},
-{
-"name": "Debug Extension Unit Tests",
-"configurations": [
-"Attach to Extension Host",
-"Run Extension Unit Tests"
-]
-},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [

18
.vscode/settings.json vendored

@@ -11,7 +11,7 @@
}
},
"files.associations": {
-"cglicenses.json": "jsonc"
+"OSSREADME.json": "jsonc"
},
"search.exclude": {
"**/node_modules": true,
@@ -22,9 +22,9 @@
"out-vscode/**": true,
"i18n/**": true,
"extensions/**/out/**": true,
-"test/smoke/out/**": true,
+"test/smoke/out/**": true
-"src/vs/base/test/node/uri.test.data.txt": true
},
+"tslint.enable": true,
"lcov.path": [
"./.build/coverage/lcov.info",
"./.build/coverage-single/lcov.info"
@@ -43,14 +43,6 @@
"git.ignoreLimitWarning": true,
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
-"typescript.preferences.quoteStyle": "single",
+"typescript.preferences.quoteStyle": "single"
-"json.schemas": [{
-"fileMatch": [ "cgmanifest.json" ],
-"url": "./.vscode/cgmanifest.schema.json"
-}, {
-"fileMatch": [ "cglicenses.json" ],
-"url": "./.vscode/cglicenses.schema.json"
-}
-],
-"git.ignoreLimitWarning": true
}


@@ -1,40 +0,0 @@
{
// Each snippet is defined under a snippet name and has a scope, prefix, body and
// description. The scope defines in watch languages the snippet is applicable. The prefix is what is
// used to trigger the snippet and the body will be expanded and inserted.Possible variables are:
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
// Placeholders with the same ids are connected.
// Example:
"MSFT Copyright Header": {
"scope": "javascript,typescript,css",
"prefix": [
"header",
"stub",
"copyright"
],
"body": [
"/*---------------------------------------------------------------------------------------------",
" * Copyright (c) Microsoft Corporation. All rights reserved.",
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
" *--------------------------------------------------------------------------------------------*/",
"",
"$0"
],
"description": "Insert Copyright Statement"
},
"TS -> Inject Service": {
"scope": "typescript",
"description": "Constructor Injection Pattern",
"prefix": "@inject",
"body": "@$1 private readonly _$2: ${1},$0"
},
"TS -> Event & Emitter": {
"scope": "typescript",
"prefix": "emitter",
"description": "Add emitter and event properties",
"body": [
"private readonly _onDid$1 = new Emitter<$2>();",
"readonly onDid$1: Event<$2> = this._onDid$1.event;"
],
}
}

17
.vscode/tasks.json vendored

@@ -28,23 +28,6 @@
}
}
},
-{
-"type": "npm",
-"script": "strict-null-check-watch",
-"label": "TS - Strict Null Checks",
-"isBackground": true,
-"presentation": {
-"reveal": "never"
-},
-"problemMatcher": {
-"base": "$tsc-watch",
-"owner": "typescript-strict-null",
-"applyTo": "allDocuments"
-},
-"runOptions": {
-"runOn": "folderOpen"
-}
-},
{
"type": "gulp",
"task": "tslint",


@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
-target "3.1.2"
+target "2.0.9"
runtime "electron"


@@ -1,90 +1,5 @@
# Change Log
## Version 1.5.1
* Release date: March 18, 2019
* Release status: General Availability
## What's new in this version
* Announcing T-SQL Notebooks
* Announcing PostgreSQL extension
* Announcing SQL Server Dacpac extension
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/25?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* GeoffYoung for `Fix sqlDropColumn description #4422`
## Version 1.4.5
* Release date: February 13, 2019
* Release status: General Availability
## What's new in this version
* Added **Admin pack for SQL Server** extension pack to make it easier to install SQL Server admin-related extensions. This includes:
* [SQL Server Agent](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-agent-extension?view=sql-server-2017)
* [SQL Server Profiler](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-profiler-extension?view=sql-server-2017)
* [SQL Server Import](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-import-extension?view=sql-server-2017)
* Added filtering extended event support in Profiler extension
* Added Save as XML feature that can save T-SQL results as XML
* Added Data-Tier Application Wizard improvements
* Added Generate script button
* Added view to give warnings of possible data loss during deployment
* Updates to the [SQL Server 2019 Preview extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* Results streaming enabled by default for long running queries
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/23?closed=1).
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
* sadedil for `Missing feature request: Save as XML #3729`
* gbritton1 for `Removed reference to object explorer #3463`
## Version 1.3.8
* Release date: January 9, 2019
* Release status: General Availability
## What's new in this version
* #13 Feature Request: Azure Active Directory Authentication
* #1040 Stream initial query results as they become available
* #3298 Сan't add an azure account.
* #2387 Support Per-User Installer
* SQL Server Import updates for DACPAC\BACPAC
* SQL Server Profiler UI and UX improvements
* Updates to [SQL Server 2019 extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* **sp_executesql to SQL** and **New Database** extensions
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
* oltruong for `typo fix #3025'`
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
* Thomas-S-B for `Simplified code #750`
## Version 1.2.4
* Release date: November 6, 2018
* Release status: General Availability
## What's new in this version
* Update to the SQL Server 2019 Preview extension
* Introducing Paste the Plan extension
* Introducing High Color queries extension, including SSMS editor theme
* Fixes in SQL Server Agent, Profiler, and Import extensions
* Fix .Net Core Socket KeepAlive issue causing dropped inactive connections on macOS
* Upgrade SQL Tools Service to .Net Core 2.2 Preview 3 (for eventual AAD support)
* Fix customer reported GitHub issues
## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
## Version 1.1.3
* Release date: October 18, 2018
* Release status: General Availability


@@ -18,15 +18,11 @@ File a single issue per problem and feature request.
* Do not enumerate multiple bugs or feature requests in the same issue.
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.
-The more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix.
+The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.
-The built-in tool for reporting an issue, which you can access by using `Report Issue` in Azure Data Studio's Help menu, can help streamline this process by automatically providing the version of Azure Data Studio, all your installed extensions, and your system info.
Please include the following with each issue.
-* Version of Azure Data Studio (formerly SQL Operations Studio)
+* Version of Azure Data Studio (formerly SQL Operations Studio).
-* Your operating system
> **Tip:** You can easily create an issue using `Report Issues` from Azure Data Studio Help menu.

1751
OSSREADME.json Normal file

File diff suppressed because it is too large


@@ -1,7 +1,6 @@
# Azure Data Studio
[![Join the chat at https://gitter.im/Microsoft/sqlopsstudio](https://badges.gitter.im/Microsoft/sqlopsstudio.svg)](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
-[![Build Status](https://dev.azure.com/ms/azuredatastudio/_apis/build/status/Microsoft.azuredatastudio)](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
@@ -9,22 +8,16 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
Platform | Link
-- | --
-Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2083322
+Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=2030731
-Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2083323
+Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2030736
-Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2083324
+macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2030738
-macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2083325
+Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2030741
-Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2083424
+Linux RPM | https://go.microsoft.com/fwlink/?linkid=2030746
-Linux RPM | https://go.microsoft.com/fwlink/?linkid=2083326
+Linux DEB | https://go.microsoft.com/fwlink/?linkid=2030750
-Linux DEB | https://go.microsoft.com/fwlink/?linkid=2083327
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
-Try out the latest insiders build from `master`:
+Try out the latest insiders build from `master` at https://github.com/Microsoft/azuredatastudio/releases.
-- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
-- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
-- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
-- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
-- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
@@ -68,20 +61,6 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
-* GeoffYoung for `Fix sqlDropColumn description #4422`
-* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
-* sadedil for `Missing feature request: Save as XML #3729`
-* gbritton1 for `Removed reference to object explorer #3463`
-* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
-* oltruong for `typo fix #3025'`
-* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
-* Thomas-S-B for `Simplified code #750`
-* rdaniels6813 for `Add query plan theme support #3031`
-* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
-* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
-* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
-* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
-* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
@@ -124,6 +103,7 @@ We would like to thank all our users who raised issues, and in particular the fo
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)
## License


@@ -17,12 +17,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
chokidar: https://github.com/paulmillr/chokidar
comment-json: https://github.com/kaelzhang/node-comment-json
core-js: https://github.com/zloirock/core-js
decompress: https://github.com/kevva/decompress
emmet: https://github.com/emmetio/emmet
error-ex: https://github.com/Qix-/node-error-ex
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
fast-plist: https://github.com/Microsoft/node-fast-plist
figures: https://github.com/sindresorhus/figures
find-remove: https://www.npmjs.com/package/find-remove
fs-extra: https://github.com/jprichardson/node-fs-extra
gc-signals: https://github.com/Microsoft/node-gc-signals
@@ -36,35 +34,28 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
moment: https://github.com/moment/moment
native-keymap: https://github.com/Microsoft/node-native-keymap
native-watchdog: https://github.com/Microsoft/node-native-watchdog
ng2-charts: https://github.com/valor-software/ng2-charts
node-fetch: https://github.com/bitinn/node-fetch
node-pty: https://github.com/Tyriar/node-pty
nsfw: https://github.com/Axosoft/nsfw
pretty-data: https://github.com/vkiryukhin/pretty-data
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
semver: https://github.com/npm/node-semver
slickgrid: https://github.com/6pac/SlickGrid
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
svg.js: https://github.com/svgdotjs/svg.js
systemjs: https://github.com/systemjs/systemjs
temp-write: https://github.com/sindresorhus/temp-write
underscore: https://github.com/jashkenas/underscore
v8-profiler: https://github.com/node-inspector/v8-profiler
vscode: https://github.com/microsoft/vscode
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
vscode-nls: https://github.com/Microsoft/vscode-nls
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
vscode-textmate: https://github.com/Microsoft/vscode-textmate
winreg: https://github.com/fresc81/node-winreg
@@ -72,9 +63,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
yauzl: https://github.com/thejoshwolfe/yauzl
zone.js: https://www.npmjs.com/package/zone
Microsoft PROSE SDK: https://microsoft.github.io/prose
%% angular NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License
Copyright (c) 2014-2017 Google, Inc. http://angular.io
@@ -300,20 +292,6 @@ THE SOFTWARE.
=========================================
END OF core-js NOTICES AND INFORMATION
%% decompress NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Kevin Mårtensson <kevinmartensson@gmail.com> (github.com/kevva)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF decompress NOTICES AND INFORMATION
%% emmet NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
@@ -415,20 +393,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
=========================================
END OF fast-plist NOTICES AND INFORMATION
%% figures NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF figures NOTICES AND INFORMATION
%% fs-extra NOTICES AND INFORMATION BEGIN HERE
=========================================
(The MIT License)
@@ -1202,43 +1166,6 @@ That's all there is to it!
=========================================
END OF jschardet NOTICES AND INFORMATION
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Semver File License
===================
The semver.py file is from https://github.com/podhmo/python-semver
which is licensed under the "MIT" license. See the semver.py file for details.
END OF JupyterLab NOTICES AND INFORMATION
%% make-error NOTICES AND INFORMATION BEGIN HERE
=========================================
ISC © Julien Fontanet
@@ -1370,32 +1297,6 @@ SOFTWARE.
=========================================
END OF ng2-charts NOTICES AND INFORMATION
%% node-fetch NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) 2016 David Frank
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
=========================================
END OF node-fetch NOTICES AND INFORMATION
%% node-pty NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1470,30 +1371,6 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
=========================================
END OF primeng NOTICES AND INFORMATION
%% process-nextick-args NOTICES AND INFORMATION BEGIN HERE
=========================================
# Copyright (c) 2015 Calvin Metcalf
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.**
=========================================
END OF process-nextick-args NOTICES AND INFORMATION
%% pty.js NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
@@ -1578,66 +1455,6 @@ END OF TERMS AND CONDITIONS
=========================================
END OF reflect-metadata NOTICES AND INFORMATION
%% request NOTICES AND INFORMATION BEGIN HERE
=========================================
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
You must give any other recipients of the Work or Derivative Works a copy of this License; and
You must cause any modified files to carry prominent notices stating that You changed the files; and
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
=========================================
END OF request NOTICES AND INFORMATION
%% rxjs NOTICES AND INFORMATION BEGIN HERE
=========================================
Apache License
@@ -1963,20 +1780,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
=========================================
END OF systemjs NOTICES AND INFORMATION
%% temp-write NOTICES AND INFORMATION BEGIN HERE
=========================================
MIT License
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF temp-write NOTICES AND INFORMATION
%% underscore NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
@@ -2079,50 +1882,6 @@ OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWA
=========================================
END OF vscode-debugprotocol NOTICES AND INFORMATION
%% vscode-languageclient NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF vscode-languageclient NOTICES AND INFORMATION
%% vscode-nls NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)
Copyright (c) Microsoft Corporation
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=========================================
END OF vscode-nls NOTICES AND INFORMATION
%% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE
=========================================
vscode-ripgrep
@@ -2282,187 +2041,3 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
=========================================
END OF zone.js NOTICES AND INFORMATION
%% Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION BEGIN HERE
=========================================
NOTICES AND INFORMATION
Do Not Translate or Localize
This software incorporates material from third parties. Microsoft makes certain
open source code available at http://3rdpartysource.microsoft.com, or you may
send a check or money order for US $5.00, including the product name, the open
source component name, and version number, to:
Source Code Compliance Team
Microsoft Corporation
One Microsoft Way
Redmond, WA 98052
USA
Notwithstanding any other terms, you may reverse engineer this software to the
extent required to debug changes to any libraries licensed under the GNU Lesser
General Public License.
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
===================================CoreFx (BEGIN)
The MIT License (MIT)
Copyright (c) .NET Foundation and Contributors
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================CoreFx (END)
===================================CoreFxLab (BEGIN)
The MIT License (MIT)
Copyright (c) Microsoft Corporation
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================CoreFxLab (END)
===================================Reactive Extensions (BEGIN)
Copyright (c) .NET Foundation and Contributors
All Rights Reserved
Licensed under the Apache License, Version 2.0 (the "License"); you
may not use this file except in compliance with the License. You may
obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied. See the License for the specific language governing permissions
and limitations under the License.
List of contributors to the Rx libraries
Rx and Ix.NET:
Wes Dyer
Jeffrey van Gogh
Matthew Podwysocki
Bart De Smet
Danny van Velzen
Erik Meijer
Brian Beckman
Aaron Lahman
Georgi Chkodrov
Arthur Watson
Gert Drapers
Mark Shields
Eric Rozell
Rx.js and Ix.js:
Matthew Podwysocki
Jeffrey van Gogh
Bart De Smet
Brian Beckman
Wes Dyer
Erik Meijer
Tx:
Georgi Chkodrov
Bart De Smet
Aaron Lahman
Erik Meijer
Brian Grunkemeyer
Beysim Sezgin
Tiho Tarnavski
Collin Meek
Sajay Anthony
Karen Albrecht
John Allen
Zach Kramer
Rx++ and Ix++:
Aaron Lahman
===================================Reactive Extensions (END)
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
=========================================
END OF Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION
%% Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION BEGIN HERE
=========================================
NOTICES AND INFORMATION
Do Not Translate or Localize
This software incorporates material from third parties. Microsoft makes certain
open source code available at http://3rdpartysource.microsoft.com, or you may
send a check or money order for US $5.00, including the product name, the open
source component name, and version number, to:
Source Code Compliance Team
Microsoft Corporation
One Microsoft Way
Redmond, WA 98052
USA
Notwithstanding any other terms, you may reverse engineer this software to the
extent required to debug changes to any libraries licensed under the GNU Lesser
General Public License.
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
The MIT License (MIT)
Copyright (c) 2014 ExcelDataReader
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
===================================ExcelDataReader (END)
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
=========================================
END OF Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION

19
appveyor.yml Normal file
View File

@@ -0,0 +1,19 @@
environment:
ELECTRON_RUN_AS_NODE: 1
VSCODE_BUILD_VERBOSE: true
cache:
- '%LOCALAPPDATA%\Yarn\cache'
install:
- ps: Install-Product node 8.9.1 x64
build_script:
- yarn
- .\node_modules\.bin\gulp electron
- npm run compile
test_script:
- node --version
- .\scripts\test.bat
- .\scripts\test-integration.bat

View File

@@ -1,46 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.x'
displayName: 'Install Node.js'
- script: |
git submodule update --init --recursive
nvm install 8.9.1
nvm use 8.9.1
npm i -g yarn
displayName: 'preinstall'
- script: |
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
# sh -e /etc/init.d/xvfb start
# sleep 3
displayName: 'Linux preinstall'
condition: eq(variables['Agent.OS'], 'Linux')
- script: |
yarn
displayName: 'Install'
- script: |
node_modules/.bin/gulp electron
node_modules/.bin/gulp compile --max_old_space_size=4096
node_modules/.bin/gulp optimize-vscode --max_old_space_size=4096
displayName: 'Scripts'
- script: |
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
displayName: 'Tests'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/test-results.xml'
condition: succeededOrFailed()
- script: |
yarn run tslint
displayName: 'Run TSLint'

View File

@@ -1,30 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.9'
displayName: 'Install Node.js'
- script: |
yarn
displayName: 'Yarn Install'
- script: |
.\node_modules\.bin\gulp electron
displayName: 'Electron'
- script: |
npm run compile
displayName: 'Compile'
- script: |
.\scripts\test.bat --reporter mocha-junit-reporter
displayName: 'Test'
- task: PublishTestResults@2
inputs:
testResultsFiles: 'test-results.xml'
condition: succeededOrFailed()
- script: |
yarn run tslint
displayName: 'Run TSLint'

View File

@@ -1,29 +0,0 @@
trigger:
- master
- releases/*
jobs:
# All tasks on Windows
- job: build_all_windows
displayName: Build all tasks (Windows)
pool:
vmImage: vs2017-win2016
steps:
- template: azure-pipelines-windows.yml
# All tasks on Linux
- job: build_all_linux
displayName: Build all tasks (Linux)
pool:
vmImage: 'Ubuntu 16.04'
steps:
- template: azure-pipelines-linux-mac.yml
# All tasks on macOS
- job: build_all_darwin
displayName: Build all tasks (macOS)
pool:
vmImage: macos-10.13
steps:
- template: azure-pipelines-linux-mac.yml

View File

@@ -1,40 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
yarn
displayName: Install Dependencies
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn strict-null-check
displayName: Run Strict Null Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- script: |
./scripts/test-integration.sh --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,91 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
set -e
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
yarn
yarn gulp -- hygiene
yarn monaco-compile-check
yarn strict-null-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" yarn gulp -- mixin
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
displayName: Prepare build
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" \
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
yarn gulp -- vscode-darwin-min upload-vscode-sourcemaps
displayName: Build
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
displayName: Run unit tests
- script: |
set -e
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
displayName: Archive build
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)'
Pattern: 'VSCode-darwin.zip'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign
- script: |
set -e
# remove pkg from archive
zip -d ../VSCode-darwin.zip "*.pkg"
# publish the build
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js \
"$(VSCODE_QUALITY)" \
darwin \
archive \
"VSCode-darwin-$(VSCODE_QUALITY).zip" \
$VERSION \
true \
../VSCode-darwin.zip
# publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_MACOS)"
# upload configuration
AZURE_STORAGE_ACCESS_KEY="$(AZURE_STORAGE_ACCESS_KEY)" \
yarn gulp -- upload-vscode-configuration
displayName: Publish
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
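A note on the publish step above: the positional arguments handed to build/azure-pipelines/common/publish.js are, in this call and in the Linux and Windows jobs later in this diff, the quality, the platform, the asset type, the asset name, the version, a boolean (always true here), and the path of the file to upload. A minimal sketch of how a caller-side reading of those positions might look (the real publish.js is not part of this diff, so the variable names are assumptions):
	// Hypothetical sketch only; argument names are illustrative, not taken from publish.js itself.
	const [quality, platform, assetType, assetName, version, flag, filePath] = process.argv.slice(2);
	console.log(`publish ${assetName} (${version}) as ${assetType} for ${platform}/${quality}, flag=${flag}, file=${filePath}`);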

View File

@@ -1 +0,0 @@
pat

View File

@@ -1,45 +0,0 @@
steps:
- script: |
set -e
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
yarn
displayName: Install Dependencies
- script: |
yarn gulp electron-x64
displayName: Download Electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- script: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn strict-null-check
displayName: Run Strict Null Checks
- script: |
yarn compile
displayName: Compile Sources
- script: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- script: |
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
displayName: Run Unit Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,40 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
function getConfig(quality) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) {
return e(err);
}
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
});
});
}
getConfig(process.argv[2])
.then(config => {
console.log(config.frozen);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});
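The script above prints the frozen flag of the requested quality and exits. A minimal sketch of a caller, assuming the required AZURE_DOCUMENTDB_* variables are set; the script's file name is not shown in this diff, so the path below is a placeholder:
	// Hypothetical caller; the path is a placeholder, not taken from this diff.
	const cp = require('child_process');
	const frozen = cp.execSync('node build/azure-pipelines/common/<config-script>.js stable', { encoding: 'utf8' }).trim() === 'true';
	if (frozen) {
		console.log('Quality is frozen; skipping publish.');
	}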

View File

@@ -1,112 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- script: |
set -e
export npm_config_arch="$(VSCODE_ARCH)"
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
fi
echo "machine monacotools.visualstudio.com password $(VSO_PAT)" > ~/.netrc
CHILD_CONCURRENCY=1 yarn
npm run gulp -- hygiene
npm run monaco-compile-check
npm run strict-null-check
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- mixin
node build/azure-pipelines/common/installDistro.js
node build/lib/builtInExtensions.js
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)" npm run gulp -- vscode-linux-$(VSCODE_ARCH)-min
name: build
- script: |
set -e
npm run gulp -- "electron-$(VSCODE_ARCH)"
# xvfb seems to be crashing often, let's make sure it's always up
service xvfb start
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
name: test
- script: |
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
ARCH="$(VSCODE_ARCH)"
# Publish tarball
PLATFORM_LINUX="linux-$(VSCODE_ARCH)"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
BUILDNAME="VSCode-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" "$(VSCODE_HOCKEYAPP_ID_LINUX64)"
# Publish DEB
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-deb"
PLATFORM_DEB="linux-deb-$ARCH"
[[ "$ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
# Publish RPM
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-build-rpm"
PLATFORM_RPM="linux-rpm-$ARCH"
[[ "$ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
# Publish Snap
npm run gulp -- "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$(VSCODE_ARCH).tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true
- task: PublishPipelineArtifact@0
displayName: 'Publish Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball

View File

@@ -1,42 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: DownloadPipelineArtifact@0
displayName: 'Download Pipeline Artifact'
inputs:
artifactName: snap-$(VSCODE_ARCH)
targetPath: .build/linux/snap-tarball
- script: |
set -e
REPO="$(pwd)"
ARCH="$(VSCODE_ARCH)"
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"
# Install build dependencies
(cd build && yarn)
# Unpack snap tarball artifact, in order to preserve file perms
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && snapcraft snap --output "$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(AZURE_DOCUMENTDB_MASTERKEY)" \
AZURE_STORAGE_ACCESS_KEY_2="$(AZURE_STORAGE_ACCESS_KEY_2)" \
MOONCAKE_STORAGE_ACCESS_KEY="$(MOONCAKE_STORAGE_ACCESS_KEY)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"

View File

@@ -1,65 +0,0 @@
resources:
containers:
- container: vscode-x64
image: joaomoreno/vscode-linux-build-agent:x64
- container: vscode-ia32
image: joaomoreno/vscode-linux-build-agent:ia32
- container: snapcraft
image: snapcore/snapcraft
jobs:
- job: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- job: Windows32
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
pool:
vmImage: VS2017-Win2016
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml
- job: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: vscode-x64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnap
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: x64
container: snapcraft
dependsOn: Linux
steps:
- template: linux/snap-build-linux.yml
- job: Linux32
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
variables:
VSCODE_ARCH: ia32
container: vscode-ia32
steps:
- template: linux/product-build-linux.yml
- job: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
pool:
vmImage: macOS 10.13
steps:
- template: darwin/product-build-darwin.yml

View File

@@ -1,44 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- powershell: |
yarn
displayName: Install Dependencies
- powershell: |
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: |
yarn strict-null-check
displayName: Run Strict Null Checks
- powershell: |
yarn compile
displayName: Compile Sources
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:
testResultsFiles: '*-results.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
condition: succeededOrFailed()

View File

@@ -1,152 +0,0 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "8.12.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine monacotools.visualstudio.com password $(VSO_PAT)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
exec { yarn }
exec { npm run gulp -- hygiene }
exec { npm run monaco-compile-check }
exec { npm run strict-null-check }
exec { npm run gulp -- mixin }
exec { node build/azure-pipelines/common/installDistro.js }
exec { node build/lib/builtInExtensions.js }
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(VSCODE_MIXIN_PASSWORD)"
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-min" }
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
name: build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npm run gulp -- "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
name: test
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)'
Pattern: '*.dll,*.exe,*.node'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "VS Code"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://code.visualstudio.com/"
},
{
"parameterName": "Append",
"parameterValue": "/as"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
inputs:
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
feedsToUse: config
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
restoreDirectory: packages
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: 'ESRP CodeSign'
- powershell: |
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(ESRP_AUTH_CERTIFICATE) -AuthCertificateKey $(ESRP_AUTH_CERTIFICATE_KEY)
displayName: Import ESRP Auth Certificate
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npm run gulp -- "vscode-win32-$(VSCODE_ARCH)-archive" "vscode-win32-$(VSCODE_ARCH)-system-setup" "vscode-win32-$(VSCODE_ARCH)-user-setup" --sign }
$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe"
$UserExe = "$Repo\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip"
$Build = "$Root\VSCode-win32-$(VSCODE_ARCH)"
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(AZURE_STORAGE_ACCESS_KEY_2)"
$env:MOONCAKE_STORAGE_ACCESS_KEY = "$(MOONCAKE_STORAGE_ACCESS_KEY)"
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(AZURE_DOCUMENTDB_MASTERKEY)"
$assetPlatform = if ("$(VSCODE_ARCH)" -eq "ia32") { "win32" } else { "win32-x64" }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-archive" archive "VSCode-win32-$(VSCODE_ARCH)-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform" setup "VSCodeSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$global:assetPlatform-user" setup "VSCodeUserSetup-$(VSCODE_ARCH)-$Version.exe" $Version true $UserExe }
# publish hockeyapp symbols
$hockeyAppId = if ("$(VSCODE_ARCH)" -eq "ia32") { "$(VSCODE_HOCKEYAPP_ID_WIN32)" } else { "$(VSCODE_HOCKEYAPP_ID_WIN64)" }
exec { node build/azure-pipelines/common/symbols.js "$(VSCODE_MIXIN_PASSWORD)" "$(VSCODE_HOCKEYAPP_TOKEN)" "$(VSCODE_ARCH)" $hockeyAppId }
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
continueOnError: true

View File

@@ -1,70 +0,0 @@
function Create-TmpJson($Obj) {
$FileName = [System.IO.Path]::GetTempFileName()
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
return $FileName
}
$Auth = Create-TmpJson @{
Version = "1.0.0"
AuthenticationType = "AAD_CERT"
ClientId = $env:ESRPClientId
AuthCert = @{
SubjectName = $env:ESRPAuthCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
RequestSigningCert = @{
SubjectName = $env:ESRPCertificateSubjectName
StoreLocation = "LocalMachine"
StoreName = "My"
}
}
$Policy = Create-TmpJson @{
Version = "1.0.0"
}
$Input = Create-TmpJson @{
Version = "1.0.0"
SignBatches = @(
@{
SourceLocationType = "UNC"
SignRequestFiles = @(
@{
SourceLocation = $args[0]
}
)
SigningInfo = @{
Operations = @(
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolSign"
Parameters = @{
OpusName = "VS Code"
OpusInfo = "https://code.visualstudio.com/"
Append = "/as"
FileDigest = "/fd `"SHA256`""
PageHash = "/NPH"
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
}
ToolName = "sign"
ToolVersion = "1.0"
},
@{
KeyCode = "CP-230012"
OperationCode = "SigntoolVerify"
Parameters = @{
VerifyAll = "/all"
}
ToolName = "sign"
ToolVersion = "1.0"
}
)
}
}
)
}
$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output

View File

@@ -1,2 +1,12 @@
 [
+	{
+		"name": "ms-vscode.node-debug",
+		"version": "1.26.7",
+		"repo": "https://github.com/Microsoft/vscode-node-debug"
+	},
+	{
+		"name": "ms-vscode.node-debug2",
+		"version": "1.26.8",
+		"repo": "https://github.com/Microsoft/vscode-node-debug2"
+	}
 ]
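These are the entries consumed by the builtInExtensions / excludedExtensions filtering that appears in build/gulpfile.vscode.js further down in this diff. A minimal sketch of that filter, using only names shown there:
	// Sketch: keep a local extension only if it is neither excluded nor listed as built-in.
	const builtInExtensions = require('./builtInExtensions.json');
	const excludedExtensions = ['vscode-api-tests', 'vscode-colorize-tests', 'ms-vscode.node-debug', 'ms-vscode.node-debug2'];
	const keep = name => excludedExtensions.indexOf(name) === -1 && builtInExtensions.every(b => b.name !== name);
	console.log(keep('ms-vscode.node-debug'), keep('agent')); // false true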

View File

@@ -43,7 +43,7 @@ function asYarnDependency(prefix, tree) {
 }
 function getYarnProductionDependencies(cwd) {
-	const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
+	const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
 	const match = /^{"type":"tree".*$/m.exec(raw);
 	if (!match || match.length !== 1) {
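The only change in this hunk is the third stdio slot passed to cp.execSync, i.e. where yarn's stderr goes; stdout is still captured either way. A small illustration of the difference (standard Node child_process behaviour):
	const cp = require('child_process');
	// 'inherit': yarn's stderr is forwarded to the parent process, so warnings show up in the build log.
	const a = cp.execSync('yarn list --json', { encoding: 'utf8', stdio: [null, null, 'inherit'] });
	// 'ignore': yarn's stderr is discarded; only the captured stdout is used.
	const b = cp.execSync('yarn list --json', { encoding: 'utf8', stdio: [null, null, 'ignore'] });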

View File

@@ -28,7 +28,7 @@ var editorEntryPoints = [
		name: 'vs/editor/editor.main',
		include: [],
		exclude: ['vs/css', 'vs/nls'],
-		prepend: ['out-editor-build/vs/css.js', 'out-editor-build/vs/nls.js'],
+		prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
	},
	{
@@ -79,23 +79,18 @@ gulp.task('extract-editor-src', ['clean-editor-src'], function () {
			apiusages,
			extrausages
		],
-		typings: [
-			'typings/lib.ie11_safe_es6.d.ts',
-			'typings/thenable.d.ts',
-			'typings/es6-promise.d.ts',
-			'typings/require-monaco.d.ts',
-			'vs/monaco.d.ts'
-		],
		libs: [
-			`lib.es5.d.ts`,
-			`lib.dom.d.ts`,
-			`lib.webworker.importscripts.d.ts`
+			`lib.d.ts`,
+			`lib.es2015.collection.d.ts`
		],
		redirects: {
			'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
		},
+		compilerOptions: {
+			module: 2, // ModuleKind.AMD
+		},
		shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
-		importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
+		importIgnorePattern: /^vs\/css!/,
		destRoot: path.join(root, 'out-editor-src')
	});
});
@@ -113,8 +108,6 @@ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'],
		loaderConfig: {
			paths: {
				'vs': 'out-editor-build/vs',
-				'vs/css': 'out-editor-build/vs/css.build',
-				'vs/nls': 'out-editor-build/vs/nls.build',
				'vscode': 'empty:'
			}
		},
@@ -129,42 +122,25 @@ gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
-gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro', 'extract-editor-src'], function () {
-	standalone.createESMSourcesAndResources2({
-		srcFolder: './out-editor-src',
-		outFolder: './out-editor-esm',
-		outResourcesFolder: './out-monaco-editor-core/esm',
-		ignores: [
-			'inlineEntryPoint:0.ts',
-			'inlineEntryPoint:1.ts',
-			'vs/loader.js',
-			'vs/nls.ts',
-			'vs/nls.build.js',
-			'vs/nls.d.ts',
-			'vs/css.js',
-			'vs/css.build.js',
-			'vs/css.d.ts',
-			'vs/base/worker/workerMain.ts',
+gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
+	standalone.createESMSourcesAndResources({
+		entryPoints: [
+			'vs/editor/editor.main',
+			'vs/editor/editor.worker'
		],
-		renames: {
-			'vs/nls.mock.ts': 'vs/nls.ts'
+		outFolder: './out-editor-esm/src',
+		outResourcesFolder: './out-monaco-editor-core/esm',
+		redirects: {
+			'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
+			'vs/nls': 'vs/nls.mock',
		}
	});
});
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
-	if (process.platform === 'win32') {
-		const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
-			cwd: path.join(__dirname, '../out-editor-esm')
-		});
-		console.log(result.stdout.toString());
-		console.log(result.stderr.toString());
-	} else {
-		const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
-			cwd: path.join(__dirname, '../out-editor-esm')
-		});
-		console.log(result.stdout.toString());
-		console.log(result.stderr.toString());
-	}
+	const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
+		cwd: path.join(__dirname, '../out-editor-esm')
+	});
+	console.log(result.stdout.toString());
});
function toExternalDTS(contents) {
@@ -218,7 +194,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
				this.emit('data', new File({
					path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
					base: data.base,
-					contents: Buffer.from(toExternalDTS(data.contents.toString()))
+					contents: new Buffer(toExternalDTS(data.contents.toString()))
				}));
			}))
			.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
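One of the smaller changes in this file swaps new Buffer() for Buffer.from() when rewriting monaco.d.ts into editor.api.d.ts. Both calls produce the same bytes; the constructor form is simply deprecated in current Node releases:
	// Same output, different API generation:
	const a = Buffer.from('declare namespace monaco {}'); // preferred
	const b = new Buffer('declare namespace monaco {}');  // deprecated (DEP0005), still functional
	console.log(a.equals(b)); // true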

View File

@@ -21,7 +21,6 @@ const nlsDev = require('vscode-nls-dev');
 const root = path.dirname(__dirname);
 const commit = util.getVersion(root);
 const plumber = require('gulp-plumber');
-const _ = require('underscore');
 const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
@@ -36,8 +35,7 @@ const tasks = compilations.map(function (tsconfigFile) {
	const absolutePath = path.join(extensionsPath, tsconfigFile);
	const relativeDirname = path.dirname(tsconfigFile);
-	const tsconfig = require(absolutePath);
-	const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
+	const tsOptions = require(absolutePath).compilerOptions;
	tsOptions.verbose = false;
	tsOptions.sourceMap = true;
@@ -169,4 +167,4 @@ gulp.task('watch-extensions', tasks.map(t => t.watch));
gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild));
gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild));
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));
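The tsconfig handling above is the substantive change in this file: one side reads compilerOptions directly, the other also merges in the compilerOptions of an extended base tsconfig via underscore's assign, a shallow merge in which later arguments win. A minimal sketch of that merge:
	const _ = require('underscore');
	const base = { module: 'commonjs', strict: true }; // from the extended base tsconfig (illustrative values)
	const local = { strict: false };                   // from the extension's own tsconfig
	console.log(_.assign({}, base, local));            // { module: 'commonjs', strict: false }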

View File

@@ -43,13 +43,11 @@ const indentationFilter = [
	// except specific files
	'!ThirdPartyNotices.txt',
	'!LICENSE.txt',
-	'!**/LICENSE',
	'!src/vs/nls.js',
-	'!src/vs/nls.build.js',
	'!src/vs/css.js',
-	'!src/vs/css.build.js',
	'!src/vs/loader.js',
	'!src/vs/base/common/marked/marked.js',
+	'!src/vs/base/common/winjs.base.js',
	'!src/vs/base/node/terminateProcess.sh',
	'!src/vs/base/node/cpuUsage.sh',
	'!test/assert.js',
@@ -80,14 +78,12 @@ const indentationFilter = [
	'!src/vs/*/**/*.d.ts',
	'!src/typings/**/*.d.ts',
	'!extensions/**/*.d.ts',
-	'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}',
+	'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
	'!build/{lib,tslintRules}/**/*.js',
	'!build/**/*.sh',
-	'!build/azure-pipelines/**/*.js',
-	'!build/azure-pipelines/**/*.config',
+	'!build/tfs/**/*.js',
+	'!build/tfs/**/*.config',
	'!**/Dockerfile',
-	'!**/*.Dockerfile',
-	'!**/*.dockerfile',
	'!extensions/markdown-language-features/media/*.js'
];
@@ -100,8 +96,6 @@ const copyrightFilter = [
	'!**/*.md',
	'!**/*.bat',
	'!**/*.cmd',
-	'!**/*.ico',
-	'!**/*.icns',
	'!**/*.xml',
	'!**/*.sh',
	'!**/*.txt',
@@ -109,12 +103,10 @@ const copyrightFilter = [
	'!**/*.opts',
	'!**/*.disabled',
	'!**/*.code-workspace',
-	'!**/promise-polyfill/polyfill.js',
	'!build/**/*.init',
	'!resources/linux/snap/snapcraft.yaml',
	'!resources/linux/snap/electron-launch',
	'!resources/win32/bin/code.js',
-	'!resources/completions/**',
	'!extensions/markdown-language-features/media/highlight.css',
	'!extensions/html-language-features/server/src/modes/typescript/*',
	'!extensions/*/server/bin/*'
@@ -128,6 +120,7 @@ const eslintFilter = [
	'!src/vs/nls.js',
	'!src/vs/css.build.js',
	'!src/vs/nls.build.js',
+	'!src/**/winjs.base.js',
	'!src/**/marked.js',
	'!**/test/**'
];
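The arrays in this file are plain glob include/exclude lists: the leading patterns pull files in and each '!' pattern carves files back out. A minimal sketch of applying such a list with gulp-filter, which these gulpfiles already use elsewhere:
	// Sketch only; the filter list here is illustrative, not one of the lists above.
	const gulp = require('gulp');
	const filter = require('gulp-filter');
	const someFilter = ['**', '!**/*.md', '!**/node_modules/**'];
	gulp.task('apply-filter-sketch', () =>
		gulp.src('**/*', { base: '.', dot: true })
			.pipe(filter(someFilter))
			.pipe(gulp.dest('out-filtered')));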

View File

@@ -13,6 +13,7 @@ const es = require('event-stream');
 const util = require('./lib/util');
 const remote = require('gulp-remote-src');
 const zip = require('gulp-vinyl-zip');
+const assign = require('object-assign');
 // {{SQL CARBON EDIT}}
 const jeditor = require('gulp-json-editor');
View File

@@ -33,17 +33,16 @@ const i18n = require('./lib/i18n');
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const glob = require('glob');
-// {{SQL CARBON EDIT}} - End
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const createAsar = require('./lib/asar').createAsar;
-const minimist = require('minimist');
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// @ts-ignore
// {{SQL CARBON EDIT}}
var del = require('del');
+const extensionsRoot = path.join(root, 'extensions');
+const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
// {{SQL CARBON EDIT}}
const nodeModules = [
@@ -57,7 +56,28 @@ const nodeModules = [
	.concat(_.uniq(productionDependencies.map(d => d.name)))
	.concat(baseModules);
// Build
+const builtInExtensions = require('./builtInExtensions.json');
+const excludedExtensions = [
+	'vscode-api-tests',
+	'vscode-colorize-tests',
+	'ms-vscode.node-debug',
+	'ms-vscode.node-debug2',
+];
+// {{SQL CARBON EDIT}}
+const vsce = require('vsce');
+const sqlBuiltInExtensions = [
+	// Add SQL built-in extensions here.
+	// the extension will be excluded from SQLOps package and will have separate vsix packages
+	'agent',
+	'import',
+	'profiler'
+];
+var azureExtensions = [ 'azurecore'];
const vscodeEntryPoints = _.flatten([
	buildfile.entrypoint('vs/workbench/workbench.main'),
	buildfile.base,
@@ -70,15 +90,14 @@ const vscodeResources = [
	'out-build/cli.js',
	'out-build/driver.js',
	'out-build/bootstrap.js',
-	'out-build/bootstrap-fork.js',
	'out-build/bootstrap-amd.js',
-	'out-build/bootstrap-window.js',
	'out-build/paths.js',
	'out-build/vs/**/*.{svg,png,cur,html}',
	'out-build/vs/base/common/performance.js',
	'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}',
	'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
	'out-build/vs/workbench/browser/media/*-theme.css',
+	'out-build/vs/workbench/electron-browser/bootstrap/**',
	'out-build/vs/workbench/parts/debug/**/*.json',
	'out-build/vs/workbench/parts/execution/**/*.scpt',
	'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
@@ -87,7 +106,6 @@ const vscodeResources = [
	'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
	'out-build/vs/workbench/services/files/**/*.exe',
	'out-build/vs/workbench/services/files/**/*.md',
-	'out-build/vs/code/electron-browser/workbench/**',
	'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
	'out-build/vs/code/electron-browser/issue/issueReporter.js',
	'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
@@ -111,7 +129,6 @@ const vscodeResources = [
	'out-build/sql/parts/jobManagement/common/media/*.svg',
	'out-build/sql/media/objectTypes/*.svg',
	'out-build/sql/media/icons/*.svg',
-	'out-build/sql/parts/notebook/media/**/*.svg',
	'!**/test/**'
];
@@ -135,58 +152,51 @@ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compil
gulp.task('optimize-index-js', ['optimize-vscode'], () => {
-	const fullpath = path.join(process.cwd(), 'out-vscode/bootstrap-window.js');
+	const fullpath = path.join(process.cwd(), 'out-vscode/vs/workbench/electron-browser/bootstrap/index.js');
	const contents = fs.readFileSync(fullpath).toString();
	const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
	fs.writeFileSync(fullpath, newContents);
});
-const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
+const baseUrl = `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`;
gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
-gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`));
+gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
// Package
// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
-function darwinBundleDocumentType(extensions, icon) {
-	return {
-		name: product.nameLong + ' document',
-		role: 'Editor',
-		ostypes: ["TEXT", "utxt", "TUTX", "****"],
-		extensions: extensions,
-		iconFile: icon
-	};
-}
const config = {
	version: getElectronVersion(),
	productAppName: product.nameLong,
	companyName: 'Microsoft Corporation',
-	copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
+	copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
	darwinIcon: 'resources/darwin/code.icns',
	darwinBundleIdentifier: product.darwinBundleIdentifier,
	darwinApplicationCategoryType: 'public.app-category.developer-tools',
	darwinHelpBookFolder: 'VS Code HelpBook',
	darwinHelpBookName: 'VS Code HelpBook',
-	darwinBundleDocumentTypes: [
-		// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
-		darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
-	],
+	darwinBundleDocumentTypes: [{
+		name: product.nameLong + ' document',
+		role: 'Editor',
+		ostypes: ["TEXT", "utxt", "TUTX", "****"],
+		// {{SQL CARBON EDIT}}
+		extensions: ["csv", "json", "sqlplan", "sql", "xml"],
+		iconFile: 'resources/darwin/code_file.icns'
+	}],
	darwinBundleURLTypes: [{
		role: 'Viewer',
		name: product.nameLong,
		urlSchemes: [product.urlProtocol]
	}],
-	darwinForceDarkModeSupport: true,
-	darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
+	darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
	linuxExecutableName: product.applicationName,
	winIcon: 'resources/win32/code.ico',
-	token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
+	token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
	// @ts-ignore JSON checking: electronRepository is optional
-	repo: product.electronRepository || undefined
+	repo: product.electronRepository || void 0
};
function getElectron(arch) {
@@ -199,7 +209,7 @@ function getElectron(arch) {
	});
	return gulp.src('package.json')
		.pipe(json({ name: product.nameShort }))
		.pipe(electron(electronOpts))
		.pipe(filter(['**', '!**/app/package.json']))
		.pipe(vfs.dest('.build/electron'));
@@ -210,8 +220,6 @@ gulp.task('clean-electron', util.rimraf('.build/electron'));
gulp.task('electron', ['clean-electron'], getElectron(process.arch));
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));
-gulp.task('electron-arm', ['clean-electron'], getElectron('arm'));
-gulp.task('electron-arm64', ['clean-electron'], getElectron('arm64'));
/**
@@ -248,6 +256,61 @@ function computeChecksum(filename) {
	return hash;
}
+function packageBuiltInExtensions() {
+	const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
+		.map(manifestPath => {
+			const extensionPath = path.dirname(path.join(root, manifestPath));
+			const extensionName = path.basename(extensionPath);
+			return { name: extensionName, path: extensionPath };
+		})
+		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
+		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
+		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
+	sqlBuiltInLocalExtensionDescriptions.forEach(element => {
+		const packagePath = path.join(path.dirname(root), element.name + '.vsix');
+		console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
+		vsce.createVSIX({
+			cwd: element.path,
+			packagePath: packagePath,
+			useYarn: true
+		});
+	});
+}
+// {{SQL CARBON EDIT}}
+function packageAzureCoreTask(platform, arch) {
+	var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
+	if (platform === 'darwin') {
+		destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', 'azurecore');
+	} else {
+		destination = path.join(destination, 'resources', 'app', 'extensions', 'azurecore');
+	}
+	platform = platform || process.platform;
+	return () => {
+		const root = path.resolve(path.join(__dirname, '..'));
+		const localExtensionDescriptions = glob.sync('extensions/*/package.json')
+			.map(manifestPath => {
+				const extensionPath = path.dirname(path.join(root, manifestPath));
+				const extensionName = path.basename(extensionPath);
+				return { name: extensionName, path: extensionPath };
+			})
+			.filter(({ name }) => azureExtensions.indexOf(name) > -1);
+		const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
+			return ext.fromLocal(extension.path);
+		}));
+		let result = localExtensions
+			.pipe(util.skipDirectories())
+			.pipe(util.fixWin32DirectoryPermissions())
+			.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
+		return result.pipe(vfs.dest(destination));
+	};
+}
function packageTask(platform, arch, opts) {
	opts = opts || {};
@@ -261,23 +324,47 @@ function packageTask(platform, arch, opts) {
	const checksums = computeChecksums(out, [
		'vs/workbench/workbench.main.js',
		'vs/workbench/workbench.main.css',
-		'vs/code/electron-browser/workbench/workbench.html',
-		'vs/code/electron-browser/workbench/workbench.js'
+		'vs/workbench/electron-browser/bootstrap/index.html',
+		'vs/workbench/electron-browser/bootstrap/index.js',
+		'vs/workbench/electron-browser/bootstrap/preload.js'
	]);
	const src = gulp.src(out + '/**', { base: '.' })
-		.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
-		.pipe(util.setExecutableBit(['**/*.sh']))
-		.pipe(filter(['**', '!**/*.js.map']));
+		.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
	const root = path.resolve(path.join(__dirname, '..'));
+	const localExtensionDescriptions = glob.sync('extensions/*/package.json')
+		.map(manifestPath => {
+			const extensionPath = path.dirname(path.join(root, manifestPath));
+			const extensionName = path.basename(extensionPath);
+			return { name: extensionName, path: extensionPath };
+		})
+		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
+		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
+		// {{SQL CARBON EDIT}}
+		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
+		.filter(({ name }) => azureExtensions.indexOf(name) === -1);
+	packageBuiltInExtensions();
+	const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
+		return ext.fromLocal(extension.path)
+			.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
+	}));
	// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions(); const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const sources = es.merge(src, ext.packageExtensionsStream({ const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
sourceMappingURLBase: sourceMappingURLBase .pipe(filter(['**', '!**/package-lock.json']))
})); .pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
const sources = es.merge(src, localExtensions, localExtensionDependencies)
.pipe(util.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
let version = packageJson.version; let version = packageJson.version;
// @ts-ignore JSON checking: quality is optional // @ts-ignore JSON checking: quality is optional
@@ -289,15 +376,8 @@ function packageTask(platform, arch, opts) {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort; const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
const packageJsonUpdates = { name, version };
// for linux url handling
if (platform === 'linux') {
packageJsonUpdates.desktopName = `${product.applicationName}-url-handler.desktop`;
}
const packageJsonStream = gulp.src(['package.json'], { base: '.' }) const packageJsonStream = gulp.src(['package.json'], { base: '.' })
.pipe(json(packageJsonUpdates)); .pipe(json({ name, version }));
const date = new Date().toISOString(); const date = new Date().toISOString();
const productJsonUpdate = { commit, date, checksums }; const productJsonUpdate = { commit, date, checksums };
@@ -315,7 +395,7 @@ function packageTask(platform, arch, opts) {
// TODO the API should be copied to `out` during compile, not here // TODO the API should be copied to `out` during compile, not here
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts')); const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts')); const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
const depsSrc = [ const depsSrc = [
@@ -327,17 +407,16 @@ function packageTask(platform, arch, opts) {
const deps = gulp.src(depsSrc, { base: '.', dot: true }) const deps = gulp.src(depsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json'])) .pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('vscode-sqlite3', ['binding.gyp', 'benchmark/**', 'cloudformation/**', 'deps/**', 'test/**', 'build/**', 'src/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/*.js']))
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['build/Release/*.node'])) .pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('jschardet', ['dist/**'])) .pipe(util.cleanNodeModule('jschardet', ['dist/**']))
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['build/Release/*.node', 'src/index.js'])) .pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node'])) .pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node'])) .pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
@@ -349,7 +428,6 @@ function packageTask(platform, arch, opts) {
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined)) .pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a'])) .pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a'])) .pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
// {{SQL CARBON EDIT}} - End
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node'])) .pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar')); .pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
@@ -360,7 +438,7 @@ function packageTask(platform, arch, opts) {
'node_modules/slickgrid/**/*.*', 'node_modules/slickgrid/**/*.*',
'node_modules/underscore/**/*.*', 'node_modules/underscore/**/*.*',
'node_modules/zone.js/**/*.*', 'node_modules/zone.js/**/*.*',
'node_modules/chart.js/**/*.*', 'node_modules/chart.js/**/*.*'
], { base: '.', dot: true }); ], { base: '.', dot: true });
let all = es.merge( let all = es.merge(
@@ -377,37 +455,7 @@ function packageTask(platform, arch, opts) {
); );
if (platform === 'win32') { if (platform === 'win32') {
all = es.merge(all, gulp.src([ all = es.merge(all, gulp.src(['resources/win32/code_file.ico', 'resources/win32/code_70x70.png', 'resources/win32/code_150x150.png'], { base: '.' }));
'resources/win32/bower.ico',
'resources/win32/c.ico',
'resources/win32/config.ico',
'resources/win32/cpp.ico',
'resources/win32/csharp.ico',
'resources/win32/css.ico',
'resources/win32/default.ico',
'resources/win32/go.ico',
'resources/win32/html.ico',
'resources/win32/jade.ico',
'resources/win32/java.ico',
'resources/win32/javascript.ico',
'resources/win32/json.ico',
'resources/win32/less.ico',
'resources/win32/markdown.ico',
'resources/win32/php.ico',
'resources/win32/powershell.ico',
'resources/win32/python.ico',
'resources/win32/react.ico',
'resources/win32/ruby.ico',
'resources/win32/sass.ico',
'resources/win32/shell.ico',
'resources/win32/sql.ico',
'resources/win32/typescript.ico',
'resources/win32/vue.ico',
'resources/win32/xml.ico',
'resources/win32/yaml.ico',
'resources/win32/code_70x70.png',
'resources/win32/code_150x150.png'
], { base: '.' }));
} else if (platform === 'linux') { } else if (platform === 'linux') {
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' })); all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
} else if (platform === 'darwin') { } else if (platform === 'darwin') {
@@ -423,8 +471,6 @@ function packageTask(platform, arch, opts) {
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true }))) .pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'])); .pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
// result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
if (platform === 'win32') { if (platform === 'win32') {
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' })); result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' }));
@@ -434,8 +480,6 @@ function packageTask(platform, arch, opts) {
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' }) result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
.pipe(replace('@@NAME@@', product.nameShort)) .pipe(replace('@@NAME@@', product.nameShort))
.pipe(replace('@@COMMIT@@', commit))
.pipe(replace('@@APPNAME@@', product.applicationName))
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; }))); .pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' }) result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
@@ -446,15 +490,6 @@ function packageTask(platform, arch, opts) {
.pipe(rename('bin/' + product.applicationName))); .pipe(rename('bin/' + product.applicationName)));
} }
// submit all stats that have been collected
// during the build phase
if (opts.stats) {
result.on('end', () => {
const { submitAllStats } = require('./lib/stats');
submitAllStats(product, commit).then(() => console.log('Submitted bundle stats!'));
});
}
return result.pipe(vfs.dest(destination)); return result.pipe(vfs.dest(destination));
}; };
} }
@@ -462,13 +497,9 @@ function packageTask(platform, arch, opts) {
const buildRoot = path.dirname(root); const buildRoot = path.dirname(root);
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'win32', 'x64')); gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('win32', 'x64'));
gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'darwin')); gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], packageAzureCoreTask('darwin'));
gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], ext.packageExtensionTask('azurecore', 'linux', 'x64')); gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('linux', 'x64'));
gulp.task('vscode-win32-x64-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'win32', 'x64'));
gulp.task('vscode-darwin-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'darwin'));
gulp.task('vscode-linux-x64-mssql', ['vscode-linux-x64-azurecore', 'optimize-vscode'], ext.packageExtensionTask('mssql', 'linux', 'x64'));
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-ia32'))); gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-x64'))); gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-x64')));
@@ -476,23 +507,20 @@ gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'azuredatastud
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-ia32'))); gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-x64'))); gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm'))); gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm')));
gulp.task('clean-vscode-linux-arm64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm64')));
gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32')); gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'vscode-win32-x64-mssql', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64')); gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'vscode-darwin-mssql', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { stats: true })); gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32')); gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'vscode-linux-x64-mssql', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64')); gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm')); gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
gulp.task('vscode-linux-arm64', ['optimize-vscode', 'clean-vscode-linux-arm64'], packageTask('linux', 'arm64'));
gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true })); gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true })); gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true, stats: true })); gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true })); gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true })); gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true })); gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
gulp.task('vscode-linux-arm64-min', ['minify-vscode', 'clean-vscode-linux-arm64'], packageTask('linux', 'arm64', { minified: true }));
// Transifex Localizations // Transifex Localizations
@@ -528,7 +556,7 @@ gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken)); ).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
}); });
gulp.task('vscode-translations-export', ['optimize-vscode'], function () { gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
const pathToMetadata = './out-vscode/nls.metadata.json'; const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*'; const pathToExtensions = './extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}'; const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
@@ -537,44 +565,46 @@ gulp.task('vscode-translations-export', ['optimize-vscode'], function () {
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()), gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()), gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions()) gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(vfs.dest('../vscode-translations-export')); // {{SQL CARBON EDIT}}
// disable since function makes calls to VS Code Transifex API
// ).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
).pipe(vfs.dest('../vscode-transifex-input'));
}); });
gulp.task('vscode-translations-pull', function () { gulp.task('vscode-translations-pull', function () {
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => { [...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
let includeDefault = !!innoSetupConfig[language.id].defaultInfo; let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
return i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-translations-import/${language.id}/setup`)); i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
})); });
}); });
gulp.task('vscode-translations-import', function () { gulp.task('vscode-translations-import', function () {
// {{SQL CARBON EDIT}} - Replace function body with our own
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => { [...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`) gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
.pipe(i18n.prepareI18nFiles()) .pipe(i18n.prepareI18nFiles())
.pipe(vfs.dest(`./i18n/${language.folderName}`)); .pipe(vfs.dest(`./i18n/${language.folderName}`));
// {{SQL CARBON EDIT}}
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
// .pipe(vfs.dest(`./build/win32/i18n`));
}); });
// {{SQL CARBON EDIT}} - End
}); });
// Sourcemaps // Sourcemaps
gulp.task('upload-vscode-sourcemaps', ['vscode-darwin-min', 'minify-vscode'], () => { gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' }) const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
.pipe(es.mapSync(f => { .pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`; f.path = `${f.base}/core/${f.relative}`;
return f; return f;
})); }));
const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' }); const extensions = gulp.src('extensions/**/out/**/*.map', { base: '.' });
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
return es.merge(vs, extensionsOut, extensionsDist) return es.merge(vs, extensions)
.pipe(es.through(function (data) {
// debug
console.log('Uploading Sourcemap', data.relative);
this.emit('data', data);
}))
.pipe(azure.upload({ .pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT, account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY, key: process.env.AZURE_STORAGE_ACCESS_KEY,
@@ -619,7 +649,7 @@ function getSettingsSearchBuildId(packageJson) {
const branch = process.env.BUILD_SOURCEBRANCH; const branch = process.env.BUILD_SOURCEBRANCH;
const branchId = branch.indexOf('/release/') >= 0 ? 0 : const branchId = branch.indexOf('/release/') >= 0 ? 0 :
/\/master$/.test(branch) ? 1 : /\/master$/.test(branch) ? 1 :
2; // Some unexpected branch 2; // Some unexpected branch
const out = cp.execSync(`git rev-list HEAD --count`); const out = cp.execSync(`git rev-list HEAD --count`);
const count = parseInt(out.toString()); const count = parseInt(out.toString());
@@ -692,5 +722,6 @@ function installService() {
} }
gulp.task('install-sqltoolsservice', () => { gulp.task('install-sqltoolsservice', () => {
return installService(); return installService();
}); });
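For reference, the `packageBuiltInExtensions` helper added in the gulpfile above boils down to one `vsce.createVSIX` call per local extension folder. Below is a minimal standalone sketch of that pattern; the extension folder name and the output location are illustrative assumptions, not values taken from this compare.

```js
// Sketch only: package one local extension folder into a .vsix in the current
// working directory. Mirrors the vsce.createVSIX({ cwd, packagePath, useYarn }) call
// used by packageBuiltInExtensions above.
const path = require('path');
const vsce = require('vsce');

async function packageOne(extensionPath) {
	// Name the archive after the extension folder (placeholder layout).
	const packagePath = path.resolve(path.basename(extensionPath) + '.vsix');
	console.info('Creating vsix for ' + extensionPath + ' result:' + packagePath);
	await vsce.createVSIX({ cwd: extensionPath, packagePath: packagePath, useYarn: true });
	return packagePath;
}

packageOne(path.resolve('extensions/azurecore')).catch(err => console.error(err));
```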

@@ -15,14 +15,11 @@ const util = require('./lib/util');
const packageJson = require('../package.json'); const packageJson = require('../package.json');
const product = require('../product.json'); const product = require('../product.json');
const rpmDependencies = require('../resources/linux/rpm/dependencies.json'); const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
const path = require('path');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000); const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
function getDebPackageArch(arch) { function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch]; return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
} }
function prepareDebPackage(arch) { function prepareDebPackage(arch) {
@@ -33,17 +30,11 @@ function prepareDebPackage(arch) {
return function () { return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' }) const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort)) .pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.applicationName)) .pipe(replace('@@ICON@@', product.applicationName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol)); .pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' }) const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -54,12 +45,6 @@ function prepareDebPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' }) const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png')); .pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('usr/share/bash-completion/completions/code'));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('usr/share/zsh/vendor-completions/_code'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; })); .pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -94,7 +79,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@')) .pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst')); .pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code); const all = es.merge(control, postinst, postrm, prerm, desktop, appdata, icon, code);
return all.pipe(vfs.dest(destination)); return all.pipe(vfs.dest(destination));
}; };
@@ -114,7 +99,7 @@ function getRpmBuildPath(rpmArch) {
} }
function getRpmPackageArch(arch) { function getRpmPackageArch(arch) {
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch]; return { x64: 'x86_64', ia32: 'i386', arm: 'armhf' }[arch];
} }
function prepareRpmPackage(arch) { function prepareRpmPackage(arch) {
@@ -124,17 +109,11 @@ function prepareRpmPackage(arch) {
return function () { return function () {
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' }) const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort)) .pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.applicationName)) .pipe(replace('@@ICON@@', product.applicationName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol)); .pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' }) const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
.pipe(replace('@@NAME_LONG@@', product.nameLong)) .pipe(replace('@@NAME_LONG@@', product.nameLong))
@@ -145,12 +124,6 @@ function prepareRpmPackage(arch) {
const icon = gulp.src('resources/linux/code.png', { base: '.' }) const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png')); .pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png'));
// const bash_completion = gulp.src('resources/completions/bash/code')
// .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
// .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; })); .pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
@@ -171,7 +144,7 @@ function prepareRpmPackage(arch) {
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' }) const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
.pipe(rename('SOURCES/' + product.applicationName + '.xpm')); .pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon); const all = es.merge(code, desktop, appdata, icon, spec, specIcon);
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch))); return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
}; };
@@ -189,7 +162,6 @@ function buildRpmPackage(arch) {
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/' 'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
]); ]);
} }
function getSnapBuildPath(arch) { function getSnapBuildPath(arch) {
return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`; return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`;
} }
@@ -210,11 +182,11 @@ function prepareSnapPackage(arch) {
.pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`)); .pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`));
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir }) const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; })); .pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' }) const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName)) .pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@VERSION@@', commit.substr(0, 8))) .pipe(replace('@@VERSION@@', packageJson.version))
.pipe(rename('snap/snapcraft.yaml')); .pipe(rename('snap/snapcraft.yaml'));
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' }) const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
@@ -228,7 +200,11 @@ function prepareSnapPackage(arch) {
function buildSnapPackage(arch) { function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch); const snapBuildPath = getSnapBuildPath(arch);
return shell.task(`cd ${snapBuildPath} && snapcraft build`); const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
]);
} }
function getFlatpakArch(arch) { function getFlatpakArch(arch) {
@@ -308,39 +284,33 @@ function buildFlatpak(arch) {
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386')); gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64')); gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64'));
gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf')); gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf'));
gulp.task('clean-vscode-linux-arm64-deb', util.rimraf('.build/linux/deb/arm64'));
gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386')); gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64')); gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf')); gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
gulp.task('clean-vscode-linux-arm64-rpm', util.rimraf('.build/linux/rpm/arm64'));
gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64'));
gulp.task('clean-vscode-linux-arm64-snap', util.rimraf('.build/linux/snap/x64')); gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i386'));
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32')); gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64')); gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm')); gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-deb', ['clean-vscode-linux-arm64-deb'], prepareDebPackage('arm64'));
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32')); gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64')); gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm')); gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
gulp.task('vscode-linux-arm64-build-deb', ['vscode-linux-arm64-prepare-deb'], buildDebPackage('arm64'));
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32')); gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64')); gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm')); gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-rpm', ['clean-vscode-linux-arm64-rpm'], prepareRpmPackage('arm64'));
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32')); gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64')); gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm')); gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
gulp.task('vscode-linux-arm64-build-rpm', ['vscode-linux-arm64-prepare-rpm'], buildRpmPackage('arm64'));
gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32')); gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32'));
gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64')); gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64'));
gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm')); gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm'));
gulp.task('vscode-linux-arm64-prepare-snap', ['clean-vscode-linux-arm64-snap'], prepareSnapPackage('arm64'));
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32')); gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64')); gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm')); gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
gulp.task('vscode-linux-arm64-build-snap', ['vscode-linux-arm64-prepare-snap'], buildSnapPackage('arm64'));
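As a usage note for the Linux packaging changes above: the `.desktop` handling on both sides is a plain token-replacement pipeline over `resources/linux/code.desktop`. A minimal sketch of that pipeline follows, assuming a `product.json` with `nameLong`, `nameShort`, and `applicationName` fields; the destination path is illustrative rather than the exact staging layout used by `prepareDebPackage`.

```js
// Sketch: fill in the placeholders of the .desktop template and stage it where a
// Debian packaging step could pick it up. Destination path is illustrative.
const gulp = require('gulp');
const replace = require('gulp-replace');
const rename = require('gulp-rename');
const product = require('../product.json');

gulp.task('prepare-desktop-entry', () => {
	return gulp.src('resources/linux/code.desktop', { base: '.' })
		.pipe(replace('@@NAME_LONG@@', product.nameLong))
		.pipe(replace('@@NAME_SHORT@@', product.nameShort))
		.pipe(replace('@@NAME@@', product.applicationName))
		.pipe(replace('@@ICON@@', product.applicationName))
		.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'))
		.pipe(gulp.dest('.build/linux/deb/amd64/deb'));
});
```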

@@ -15,7 +15,6 @@ const util = require('./lib/util');
const pkg = require('../package.json'); const pkg = require('../package.json');
const product = require('../product.json'); const product = require('../product.json');
const vfs = require('vinyl-fs'); const vfs = require('vinyl-fs');
const rcedit = require('rcedit');
const mkdirp = require('mkdirp'); const mkdirp = require('mkdirp');
const repoPath = path.dirname(__dirname); const repoPath = path.dirname(__dirname);
@@ -26,21 +25,18 @@ const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`); const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss'); const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe'); const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1'); const signPS1 = path.join(repoPath, 'build', 'tfs', 'win32', 'sign.ps1');
function packageInnoSetup(iss, options, cb) { function packageInnoSetup(iss, options, cb) {
options = options || {}; options = options || {};
const definitions = options.definitions || {}; const definitions = options.definitions || {};
const debug = process.argv.some(arg => arg === '--debug-inno');
if (process.argv.some(arg => arg === '--debug-inno')) { if (debug) {
definitions['Debug'] = 'true'; definitions['Debug'] = 'true';
} }
if (process.argv.some(arg => arg === '--sign')) {
definitions['Sign'] = 'true';
}
const keys = Object.keys(definitions); const keys = Object.keys(definitions);
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`)); keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
@@ -140,14 +136,4 @@ function copyInnoUpdater(arch) {
} }
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32')); gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64')); gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
function patchInnoUpdater(arch) {
return cb => {
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
};
}
gulp.task('vscode-win32-ia32-inno-updater', ['vscode-win32-ia32-copy-inno-updater'], patchInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-inno-updater', ['vscode-win32-x64-copy-inno-updater'], patchInnoUpdater('x64'));
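For orientation, the `patchInnoUpdater` helper that appears on only one side of this compare does nothing more than re-stamp the icon of the shipped `inno_updater.exe` via `rcedit`. A standalone sketch of that call is below; both paths are written out as assumptions rather than the repository's actual build output layout.

```js
// Sketch: give tools/inno_updater.exe the product icon. The build directory and
// icon path are illustrative placeholders.
const path = require('path');
const rcedit = require('rcedit');

function patchInnoUpdater(buildPath, cb) {
	const icon = path.join(__dirname, 'resources', 'win32', 'code.ico');
	rcedit(path.join(buildPath, 'tools', 'inno_updater.exe'), { icon: icon }, cb);
}

patchInnoUpdater('.build/win32-x64/azuredatastudio-win32-x64', err => {
	if (err) { console.error(err); }
	else { console.log('inno_updater.exe icon updated'); }
});
```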

@@ -1,15 +0,0 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es2017",
"jsx": "preserve",
"checkJs": true
},
"include": [
"**/*.js"
],
"exclude": [
"node_modules",
"**/node_modules/*"
]
}

@@ -4,33 +4,33 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path"); var path = require("path");
const es = require("event-stream"); var es = require("event-stream");
const pickle = require('chromium-pickle-js'); var pickle = require("chromium-pickle-js");
const Filesystem = require('asar/lib/filesystem'); var Filesystem = require("asar/lib/filesystem");
const VinylFile = require("vinyl"); var VinylFile = require("vinyl");
const minimatch = require("minimatch"); var minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) { function createAsar(folderPath, unpackGlobs, destFilename) {
const shouldUnpackFile = (file) => { var shouldUnpackFile = function (file) {
for (let i = 0; i < unpackGlobs.length; i++) { for (var i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) { if (minimatch(file.relative, unpackGlobs[i])) {
return true; return true;
} }
} }
return false; return false;
}; };
const filesystem = new Filesystem(folderPath); var filesystem = new Filesystem(folderPath);
const out = []; var out = [];
// Keep track of pending inserts // Keep track of pending inserts
let pendingInserts = 0; var pendingInserts = 0;
let onFileInserted = () => { pendingInserts--; }; var onFileInserted = function () { pendingInserts--; };
// Do not insert twice the same directory // Do not insert twice the same directory
const seenDir = {}; var seenDir = {};
const insertDirectoryRecursive = (dir) => { var insertDirectoryRecursive = function (dir) {
if (seenDir[dir]) { if (seenDir[dir]) {
return; return;
} }
let lastSlash = dir.lastIndexOf('/'); var lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) { if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\'); lastSlash = dir.lastIndexOf('\\');
} }
@@ -40,8 +40,8 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
seenDir[dir] = true; seenDir[dir] = true;
filesystem.insertDirectory(dir); filesystem.insertDirectory(dir);
}; };
const insertDirectoryForFile = (file) => { var insertDirectoryForFile = function (file) {
let lastSlash = file.lastIndexOf('/'); var lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) { if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\'); lastSlash = file.lastIndexOf('\\');
} }
@@ -49,7 +49,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
insertDirectoryRecursive(file.substring(0, lastSlash)); insertDirectoryRecursive(file.substring(0, lastSlash));
} }
}; };
const insertFile = (relativePath, stat, shouldUnpack) => { var insertFile = function (relativePath, stat, shouldUnpack) {
insertDirectoryForFile(relativePath); insertDirectoryForFile(relativePath);
pendingInserts++; pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted); filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
@@ -59,13 +59,13 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
return; return;
} }
if (!file.stat.isFile()) { if (!file.stat.isFile()) {
throw new Error(`unknown item in stream!`); throw new Error("unknown item in stream!");
} }
const shouldUnpack = shouldUnpackFile(file); var shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack); insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) { if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked // The file goes outside of xx.asar, in a folder xx.asar.unpacked
const relative = path.relative(folderPath, file.path); var relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({ this.queue(new VinylFile({
cwd: folderPath, cwd: folderPath,
base: folderPath, base: folderPath,
@@ -79,33 +79,34 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents); out.push(file.contents);
} }
}, function () { }, function () {
let finish = () => { var _this = this;
var finish = function () {
{ {
const headerPickle = pickle.createEmpty(); var headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header)); headerPickle.writeString(JSON.stringify(filesystem.header));
const headerBuf = headerPickle.toBuffer(); var headerBuf = headerPickle.toBuffer();
const sizePickle = pickle.createEmpty(); var sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length); sizePickle.writeUInt32(headerBuf.length);
const sizeBuf = sizePickle.toBuffer(); var sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf); out.unshift(headerBuf);
out.unshift(sizeBuf); out.unshift(sizeBuf);
} }
const contents = Buffer.concat(out); var contents = Buffer.concat(out);
out.length = 0; out.length = 0;
this.queue(new VinylFile({ _this.queue(new VinylFile({
cwd: folderPath, cwd: folderPath,
base: folderPath, base: folderPath,
path: destFilename, path: destFilename,
contents: contents contents: contents
})); }));
this.queue(null); _this.queue(null);
}; };
// Call finish() only when all file inserts have finished... // Call finish() only when all file inserts have finished...
if (pendingInserts === 0) { if (pendingInserts === 0) {
finish(); finish();
} }
else { else {
onFileInserted = () => { onFileInserted = function () {
pendingInserts--; pendingInserts--;
if (pendingInserts === 0) { if (pendingInserts === 0) {
finish(); finish();
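As a usage note: `createAsar` above (shown in compiled form on both sides, differing only in ES5 versus ES2015 output) is a through-stream that swallows the piped files and emits a single archive plus any files matching the unpack globs. A minimal sketch of wiring it up follows; the unpack globs are copied from the packaging task earlier in this compare, while the require path and destination are assumptions.

```js
// Sketch: archive node_modules into app/node_modules.asar, keeping native binaries,
// vscode-ripgrep and node-pty build output unpacked alongside it.
const path = require('path');
const gulp = require('gulp');
const vfs = require('vinyl-fs');
const { createAsar } = require('./lib/asar'); // assumes the compiled build/lib/asar.js

gulp.src(['node_modules/**', '!node_modules/.bin/**'], { base: '.', dot: true })
	.pipe(createAsar(
		path.join(process.cwd(), 'node_modules'),
		['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'],
		'app/node_modules.asar'))
	.pipe(vfs.dest('.build/asar-sketch'));
```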

@@ -7,8 +7,8 @@
import * as path from 'path'; import * as path from 'path';
import * as es from 'event-stream'; import * as es from 'event-stream';
const pickle = require('chromium-pickle-js'); import * as pickle from 'chromium-pickle-js';
const Filesystem = require('asar/lib/filesystem'); import * as Filesystem from 'asar/lib/filesystem';
import * as VinylFile from 'vinyl'; import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch'; import * as minimatch from 'minimatch';

@@ -49,7 +49,7 @@ function syncMarketplaceExtension(extension) {
rimraf.sync(getExtensionPath(extension)); rimraf.sync(getExtensionPath(extension));
return ext.fromMarketplace(extension.name, extension.version, extension.metadata) return ext.fromMarketplace(extension.name, extension.version)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`)) .pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions')) .pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎'))); .on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
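For context, the `syncMarketplaceExtension` hunk above only changes whether the third `metadata` argument is passed; the surrounding download-and-unpack flow stays the same. A minimal sketch of that flow, with a hard-coded placeholder descriptor and `console.log` standing in for the build's logger:

```js
// Sketch: fetch one marketplace extension and unpack it under .build/builtInExtensions.
// The name and version below are placeholders, not entries from this repository's list.
const rename = require('gulp-rename');
const vfs = require('vinyl-fs');
const ext = require('./lib/extensions');

const extension = { name: 'ms-vscode.node-debug2', version: '1.28.5' };

ext.fromMarketplace(extension.name, extension.version)
	.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
	.pipe(vfs.dest('.build/builtInExtensions'))
	.on('end', () => console.log('[marketplace]', extension.name, 'done'));
```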

@@ -4,19 +4,19 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs"); var fs = require("fs");
const path = require("path"); var path = require("path");
const vm = require("vm"); var vm = require("vm");
/** /**
* Bundle `entryPoints` given config `config`. * Bundle `entryPoints` given config `config`.
*/ */
function bundle(entryPoints, config, callback) { function bundle(entryPoints, config, callback) {
const entryPointsMap = {}; var entryPointsMap = {};
entryPoints.forEach((module) => { entryPoints.forEach(function (module) {
entryPointsMap[module.name] = module; entryPointsMap[module.name] = module;
}); });
const allMentionedModulesMap = {}; var allMentionedModulesMap = {};
entryPoints.forEach((module) => { entryPoints.forEach(function (module) {
allMentionedModulesMap[module.name] = true; allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) { (module.include || []).forEach(function (includedModule) {
allMentionedModulesMap[includedModule] = true; allMentionedModulesMap[includedModule] = true;
@@ -25,30 +25,26 @@ function bundle(entryPoints, config, callback) {
allMentionedModulesMap[excludedModule] = true; allMentionedModulesMap[excludedModule] = true;
}); });
}); });
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js')); var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});'); var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} }; var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports); r.call({}, require, loaderModule, loaderModule.exports);
const loader = loaderModule.exports; var loader = loaderModule.exports;
config.isBuild = true; config.isBuild = true;
config.paths = config.paths || {}; config.paths = config.paths || {};
if (!config.paths['vs/nls']) { config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/nls'] = 'out-build/vs/nls.build'; config.paths['vs/css'] = 'out-build/vs/css.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
loader.config(config); loader.config(config);
loader(['require'], (localRequire) => { loader(['require'], function (localRequire) {
const resolvePath = (path) => { var resolvePath = function (path) {
const r = localRequire.toUrl(path); var r = localRequire.toUrl(path);
if (!/\.js/.test(r)) { if (!/\.js/.test(r)) {
return r + '.js'; return r + '.js';
} }
return r; return r;
}; };
for (const moduleId in entryPointsMap) { for (var moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId]; var entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) { if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath); entryPoint.append = entryPoint.append.map(resolvePath);
} }
@@ -57,59 +53,59 @@ function bundle(entryPoints, config, callback) {
} }
} }
}); });
loader(Object.keys(allMentionedModulesMap), () => { loader(Object.keys(allMentionedModulesMap), function () {
const modules = loader.getBuildInfo(); var modules = loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap); var partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources(); var cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, { callback(null, {
files: partialResult.files, files: partialResult.files,
cssInlinedResources: cssInlinedResources, cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData bundleData: partialResult.bundleData
}); });
}, (err) => callback(err, null)); }, function (err) { return callback(err, null); });
} }
exports.bundle = bundle; exports.bundle = bundle;
function emitEntryPoints(modules, entryPoints) { function emitEntryPoints(modules, entryPoints) {
const modulesMap = {}; var modulesMap = {};
modules.forEach((m) => { modules.forEach(function (m) {
modulesMap[m.id] = m; modulesMap[m.id] = m;
}); });
const modulesGraph = {}; var modulesGraph = {};
modules.forEach((m) => { modules.forEach(function (m) {
modulesGraph[m.id] = m.dependencies; modulesGraph[m.id] = m.dependencies;
}); });
const sortedModules = topologicalSort(modulesGraph); var sortedModules = topologicalSort(modulesGraph);
let result = []; var result = [];
const usedPlugins = {}; var usedPlugins = {};
const bundleData = { var bundleData = {
graph: modulesGraph, graph: modulesGraph,
bundles: {} bundles: {}
}; };
Object.keys(entryPoints).forEach((moduleToBundle) => { Object.keys(entryPoints).forEach(function (moduleToBundle) {
const info = entryPoints[moduleToBundle]; var info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []); var rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph); var allDependencies = visit(rootNodes, modulesGraph);
const excludes = ['require', 'exports', 'module'].concat(info.exclude || []); var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot) => { excludes.forEach(function (excludeRoot) {
const allExcludes = visit([excludeRoot], modulesGraph); var allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude) => { Object.keys(allExcludes).forEach(function (exclude) {
delete allDependencies[exclude]; delete allDependencies[exclude];
}); });
}); });
const includedModules = sortedModules.filter((module) => { var includedModules = sortedModules.filter(function (module) {
return allDependencies[module]; return allDependencies[module];
}); });
bundleData.bundles[moduleToBundle] = includedModules; bundleData.bundles[moduleToBundle] = includedModules;
const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest); var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
result = result.concat(res.files); result = result.concat(res.files);
for (const pluginName in res.usedPlugins) { for (var pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName]; usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
} }
}); });
Object.keys(usedPlugins).forEach((pluginName) => { Object.keys(usedPlugins).forEach(function (pluginName) {
const plugin = usedPlugins[pluginName]; var plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') { if (typeof plugin.finishBuild === 'function') {
const write = (filename, contents) => { var write = function (filename, contents) {
result.push({ result.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -128,16 +124,16 @@ function emitEntryPoints(modules, entryPoints) {
}; };
} }
function extractStrings(destFiles) { function extractStrings(destFiles) {
const parseDefineCall = (moduleMatch, depsMatch) => { var parseDefineCall = function (moduleMatch, depsMatch) {
const module = moduleMatch.replace(/^"|"$/g, ''); var module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(','); var deps = depsMatch.split(',');
deps = deps.map((dep) => { deps = deps.map(function (dep) {
dep = dep.trim(); dep = dep.trim();
dep = dep.replace(/^"|"$/g, ''); dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, ''); dep = dep.replace(/^'|'$/g, '');
let prefix = null; var prefix = null;
let _path = null; var _path = null;
const pieces = dep.split('!'); var pieces = dep.split('!');
if (pieces.length > 1) { if (pieces.length > 1) {
prefix = pieces[0] + '!'; prefix = pieces[0] + '!';
_path = pieces[1]; _path = pieces[1];
@@ -147,7 +143,7 @@ function extractStrings(destFiles) {
_path = pieces[0]; _path = pieces[0];
} }
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) { if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/'); var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res; return prefix + res;
} }
return prefix + _path; return prefix + _path;
@@ -157,7 +153,7 @@ function extractStrings(destFiles) {
deps: deps deps: deps
}; };
}; };
destFiles.forEach((destFile) => { destFiles.forEach(function (destFile, index) {
if (!/\.js$/.test(destFile.dest)) { if (!/\.js$/.test(destFile.dest)) {
return; return;
} }
@@ -165,44 +161,44 @@ function extractStrings(destFiles) {
return; return;
} }
// Do one pass to record the usage counts for each module id // Do one pass to record the usage counts for each module id
const useCounts = {}; var useCounts = {};
destFile.sources.forEach((source) => { destFile.sources.forEach(function (source) {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/); var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) { if (!matches) {
return; return;
} }
const defineCall = parseDefineCall(matches[1], matches[2]); var defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1; useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => { defineCall.deps.forEach(function (dep) {
useCounts[dep] = (useCounts[dep] || 0) + 1; useCounts[dep] = (useCounts[dep] || 0) + 1;
}); });
}); });
const sortedByUseModules = Object.keys(useCounts); var sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => { sortedByUseModules.sort(function (a, b) {
return useCounts[b] - useCounts[a]; return useCounts[b] - useCounts[a];
}); });
const replacementMap = {}; var replacementMap = {};
sortedByUseModules.forEach((module, index) => { sortedByUseModules.forEach(function (module, index) {
replacementMap[module] = index; replacementMap[module] = index;
}); });
destFile.sources.forEach((source) => { destFile.sources.forEach(function (source) {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => { source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
const defineCall = parseDefineCall(moduleMatch, depsMatch); var defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`; return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
}); });
}); });
destFile.sources.unshift({ destFile.sources.unshift({
path: null, path: null,
contents: [ contents: [
'(function() {', '(function() {',
`var __m = ${JSON.stringify(sortedByUseModules)};`, "var __m = " + JSON.stringify(sortedByUseModules) + ";",
`var __M = function(deps) {`, "var __M = function(deps) {",
` var result = [];`, " var result = [];",
` for (var i = 0, len = deps.length; i < len; i++) {`, " for (var i = 0, len = deps.length; i < len; i++) {",
` result[i] = __m[deps[i]];`, " result[i] = __m[deps[i]];",
` }`, " }",
` return result;`, " return result;",
`};` "};"
].join('\n') ].join('\n')
}); });
destFile.sources.push({ destFile.sources.push({
@@ -214,7 +210,7 @@ function extractStrings(destFiles) {
} }
function removeDuplicateTSBoilerplate(destFiles) { function removeDuplicateTSBoilerplate(destFiles) {
// Taken from typescript compiler => emitFiles // Taken from typescript compiler => emitFiles
const BOILERPLATE = [ var BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ }, { start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ }, { start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ }, { start: /^var __decorate/, end: /^};$/ },
@@ -223,14 +219,14 @@ function removeDuplicateTSBoilerplate(destFiles) {
{ start: /^var __awaiter/, end: /^};$/ }, { start: /^var __awaiter/, end: /^};$/ },
{ start: /^var __generator/, end: /^};$/ }, { start: /^var __generator/, end: /^};$/ },
]; ];
destFiles.forEach((destFile) => { destFiles.forEach(function (destFile) {
const SEEN_BOILERPLATE = []; var SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => { destFile.sources.forEach(function (source) {
const lines = source.contents.split(/\r\n|\n|\r/); var lines = source.contents.split(/\r\n|\n|\r/);
const newLines = []; var newLines = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE; var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
for (let i = 0; i < lines.length; i++) { for (var i = 0; i < lines.length; i++) {
const line = lines[i]; var line = lines[i];
if (IS_REMOVING_BOILERPLATE) { if (IS_REMOVING_BOILERPLATE) {
newLines.push(''); newLines.push('');
if (END_BOILERPLATE.test(line)) { if (END_BOILERPLATE.test(line)) {
@@ -238,8 +234,8 @@ function removeDuplicateTSBoilerplate(destFiles) {
} }
} }
else { else {
for (let j = 0; j < BOILERPLATE.length; j++) { for (var j = 0; j < BOILERPLATE.length; j++) {
const boilerplate = BOILERPLATE[j]; var boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) { if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) { if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true; IS_REMOVING_BOILERPLATE = true;
@@ -267,45 +263,45 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
if (!dest) { if (!dest) {
dest = entryPoint + '.js'; dest = entryPoint + '.js';
} }
const mainResult = { var mainResult = {
sources: [], sources: [],
dest: dest dest: dest
}, results = [mainResult]; }, results = [mainResult];
const usedPlugins = {}; var usedPlugins = {};
const getLoaderPlugin = (pluginName) => { var getLoaderPlugin = function (pluginName) {
if (!usedPlugins[pluginName]) { if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports; usedPlugins[pluginName] = modulesMap[pluginName].exports;
} }
return usedPlugins[pluginName]; return usedPlugins[pluginName];
}; };
includedModules.forEach((c) => { includedModules.forEach(function (c) {
const bangIndex = c.indexOf('!'); var bangIndex = c.indexOf('!');
if (bangIndex >= 0) { if (bangIndex >= 0) {
const pluginName = c.substr(0, bangIndex); var pluginName = c.substr(0, bangIndex);
const plugin = getLoaderPlugin(pluginName); var plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1))); mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return; return;
} }
const module = modulesMap[c]; var module = modulesMap[c];
if (module.path === 'empty:') { if (module.path === 'empty:') {
return; return;
} }
const contents = readFileAndRemoveBOM(module.path); var contents = readFileAndRemoveBOM(module.path);
if (module.shim) { if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents)); mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
} }
else { else {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents)); mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
} }
}); });
Object.keys(usedPlugins).forEach((pluginName) => { Object.keys(usedPlugins).forEach(function (pluginName) {
const plugin = usedPlugins[pluginName]; var plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') { if (typeof plugin.writeFile === 'function') {
const req = (() => { var req = (function () {
throw new Error('no-no!'); throw new Error('no-no!');
}); });
req.toUrl = something => something; req.toUrl = function (something) { return something; };
const write = (filename, contents) => { var write = function (filename, contents) {
results.push({ results.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -317,15 +313,15 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
plugin.writeFile(pluginName, entryPoint, req, write, {}); plugin.writeFile(pluginName, entryPoint, req, write, {});
} }
}); });
const toIFile = (path) => { var toIFile = function (path) {
const contents = readFileAndRemoveBOM(path); var contents = readFileAndRemoveBOM(path);
return { return {
path: path, path: path,
contents: contents contents: contents
}; };
}; };
const toPrepend = (prepend || []).map(toIFile); var toPrepend = (prepend || []).map(toIFile);
const toAppend = (append || []).map(toIFile); var toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend); mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
return { return {
files: results, files: results,
@@ -333,8 +329,8 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
}; };
} }
function readFileAndRemoveBOM(path) { function readFileAndRemoveBOM(path) {
const BOM_CHAR_CODE = 65279; var BOM_CHAR_CODE = 65279;
let contents = fs.readFileSync(path, 'utf8'); var contents = fs.readFileSync(path, 'utf8');
// Remove BOM // Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) { if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1); contents = contents.substring(1);
@@ -342,15 +338,15 @@ function readFileAndRemoveBOM(path) {
return contents; return contents;
} }
function emitPlugin(entryPoint, plugin, pluginName, moduleName) { function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
let result = ''; var result = '';
if (typeof plugin.write === 'function') { if (typeof plugin.write === 'function') {
const write = ((what) => { var write = (function (what) {
result += what; result += what;
}); });
write.getEntryPoint = () => { write.getEntryPoint = function () {
return entryPoint; return entryPoint;
}; };
write.asModule = (moduleId, code) => { write.asModule = function (moduleId, code) {
code = code.replace(/^define\(/, 'define("' + moduleId + '",'); code = code.replace(/^define\(/, 'define("' + moduleId + '",');
result += code; result += code;
}; };
@@ -361,20 +357,20 @@ function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
contents: result contents: result
}; };
} }
function emitNamedModule(moduleId, defineCallPosition, path, contents) { function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
// `defineCallPosition` is the position in code: |define() // `defineCallPosition` is the position in code: |define()
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col); var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|() // `parensOffset` is the position in code: define|()
const parensOffset = contents.indexOf('(', defineCallOffset); var parensOffset = contents.indexOf('(', defineCallOffset);
const insertStr = '"' + moduleId + '", '; var insertStr = '"' + moduleId + '", ';
return { return {
path: path, path: path,
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1) contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
}; };
} }
function emitShimmedModule(moduleId, myDeps, factory, path, contents) { function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : ''); var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');'; var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return { return {
path: path, path: path,
contents: contents + '\n;\n' + strDefine contents: contents + '\n;\n' + strDefine
@@ -387,8 +383,7 @@ function positionToOffset(str, desiredLine, desiredCol) {
if (desiredLine === 1) { if (desiredLine === 1) {
return desiredCol - 1; return desiredCol - 1;
} }
let line = 1; var line = 1, lastNewLineOffset = -1;
let lastNewLineOffset = -1;
do { do {
if (desiredLine === line) { if (desiredLine === line) {
return lastNewLineOffset + 1 + desiredCol - 1; return lastNewLineOffset + 1 + desiredCol - 1;
@@ -402,15 +397,14 @@ function positionToOffset(str, desiredLine, desiredCol) {
* Return a set of reachable nodes in `graph` starting from `rootNodes` * Return a set of reachable nodes in `graph` starting from `rootNodes`
*/ */
function visit(rootNodes, graph) { function visit(rootNodes, graph) {
const result = {}; var result = {}, queue = rootNodes;
const queue = rootNodes; rootNodes.forEach(function (node) {
rootNodes.forEach((node) => {
result[node] = true; result[node] = true;
}); });
while (queue.length > 0) { while (queue.length > 0) {
const el = queue.shift(); var el = queue.shift();
const myEdges = graph[el] || []; var myEdges = graph[el] || [];
myEdges.forEach((toNode) => { myEdges.forEach(function (toNode) {
if (!result[toNode]) { if (!result[toNode]) {
result[toNode] = true; result[toNode] = true;
queue.push(toNode); queue.push(toNode);
@@ -423,11 +417,11 @@ function visit(rootNodes, graph) {
* Perform a topological sort on `graph` * Perform a topological sort on `graph`
*/ */
function topologicalSort(graph) { function topologicalSort(graph) {
const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {}; var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
Object.keys(graph).forEach((fromNode) => { Object.keys(graph).forEach(function (fromNode) {
allNodes[fromNode] = true; allNodes[fromNode] = true;
outgoingEdgeCount[fromNode] = graph[fromNode].length; outgoingEdgeCount[fromNode] = graph[fromNode].length;
graph[fromNode].forEach((toNode) => { graph[fromNode].forEach(function (toNode) {
allNodes[toNode] = true; allNodes[toNode] = true;
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0; outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
inverseEdges[toNode] = inverseEdges[toNode] || []; inverseEdges[toNode] = inverseEdges[toNode] || [];
@@ -435,8 +429,8 @@ function topologicalSort(graph) {
}); });
}); });
// https://en.wikipedia.org/wiki/Topological_sorting // https://en.wikipedia.org/wiki/Topological_sorting
const S = [], L = []; var S = [], L = [];
Object.keys(allNodes).forEach((node) => { Object.keys(allNodes).forEach(function (node) {
if (outgoingEdgeCount[node] === 0) { if (outgoingEdgeCount[node] === 0) {
delete outgoingEdgeCount[node]; delete outgoingEdgeCount[node];
S.push(node); S.push(node);
@@ -445,10 +439,10 @@ function topologicalSort(graph) {
while (S.length > 0) { while (S.length > 0) {
// Ensure the exact same order all the time with the same inputs // Ensure the exact same order all the time with the same inputs
S.sort(); S.sort();
const n = S.shift(); var n = S.shift();
L.push(n); L.push(n);
const myInverseEdges = inverseEdges[n] || []; var myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m) => { myInverseEdges.forEach(function (m) {
outgoingEdgeCount[m]--; outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) { if (outgoingEdgeCount[m] === 0) {
delete outgoingEdgeCount[m]; delete outgoingEdgeCount[m];

@@ -3,9 +3,9 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import * as fs from 'fs'; import fs = require('fs');
import * as path from 'path'; import path = require('path');
import * as vm from 'vm'; import vm = require('vm');
interface IPosition { interface IPosition {
line: number; line: number;
@@ -46,7 +46,7 @@ export interface IEntryPoint {
name: string; name: string;
include?: string[]; include?: string[];
exclude?: string[]; exclude?: string[];
prepend?: string[]; prepend: string[];
append?: string[]; append?: string[];
dest?: string; dest?: string;
} }
@@ -64,7 +64,7 @@ interface INodeSet {
} }
export interface IFile { export interface IFile {
path: string | null; path: string;
contents: string; contents: string;
} }
@@ -97,13 +97,13 @@ export interface ILoaderConfig {
/** /**
* Bundle `entryPoints` given config `config`. * Bundle `entryPoints` given config `config`.
*/ */
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult | null) => void): void { export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
const entryPointsMap: IEntryPointMap = {}; let entryPointsMap: IEntryPointMap = {};
entryPoints.forEach((module: IEntryPoint) => { entryPoints.forEach((module: IEntryPoint) => {
entryPointsMap[module.name] = module; entryPointsMap[module.name] = module;
}); });
const allMentionedModulesMap: { [modules: string]: boolean; } = {}; let allMentionedModulesMap: { [modules: string]: boolean; } = {};
entryPoints.forEach((module: IEntryPoint) => { entryPoints.forEach((module: IEntryPoint) => {
allMentionedModulesMap[module.name] = true; allMentionedModulesMap[module.name] = true;
(module.include || []).forEach(function (includedModule) { (module.include || []).forEach(function (includedModule) {
@@ -115,32 +115,28 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
}); });
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js')); var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
const r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});'); var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
const loaderModule = { exports: {} }; var loaderModule = { exports: {} };
r.call({}, require, loaderModule, loaderModule.exports); r.call({}, require, loaderModule, loaderModule.exports);
const loader: any = loaderModule.exports; var loader: any = loaderModule.exports;
config.isBuild = true; config.isBuild = true;
config.paths = config.paths || {}; config.paths = config.paths || {};
if (!config.paths['vs/nls']) { config.paths['vs/nls'] = 'out-build/vs/nls.build';
config.paths['vs/nls'] = 'out-build/vs/nls.build'; config.paths['vs/css'] = 'out-build/vs/css.build';
}
if (!config.paths['vs/css']) {
config.paths['vs/css'] = 'out-build/vs/css.build';
}
loader.config(config); loader.config(config);
loader(['require'], (localRequire: any) => { loader(['require'], (localRequire) => {
const resolvePath = (path: string) => { let resolvePath = (path: string) => {
const r = localRequire.toUrl(path); let r = localRequire.toUrl(path);
if (!/\.js/.test(r)) { if (!/\.js/.test(r)) {
return r + '.js'; return r + '.js';
} }
return r; return r;
}; };
for (const moduleId in entryPointsMap) { for (let moduleId in entryPointsMap) {
const entryPoint = entryPointsMap[moduleId]; let entryPoint = entryPointsMap[moduleId];
if (entryPoint.append) { if (entryPoint.append) {
entryPoint.append = entryPoint.append.map(resolvePath); entryPoint.append = entryPoint.append.map(resolvePath);
} }
@@ -151,76 +147,76 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
}); });
loader(Object.keys(allMentionedModulesMap), () => { loader(Object.keys(allMentionedModulesMap), () => {
const modules = <IBuildModuleInfo[]>loader.getBuildInfo(); let modules = <IBuildModuleInfo[]>loader.getBuildInfo();
const partialResult = emitEntryPoints(modules, entryPointsMap); let partialResult = emitEntryPoints(modules, entryPointsMap);
const cssInlinedResources = loader('vs/css').getInlinedResources(); let cssInlinedResources = loader('vs/css').getInlinedResources();
callback(null, { callback(null, {
files: partialResult.files, files: partialResult.files,
cssInlinedResources: cssInlinedResources, cssInlinedResources: cssInlinedResources,
bundleData: partialResult.bundleData bundleData: partialResult.bundleData
}); });
}, (err: any) => callback(err, null)); }, (err) => callback(err, null));
} }
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult { function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
const modulesMap: IBuildModuleInfoMap = {}; let modulesMap: IBuildModuleInfoMap = {};
modules.forEach((m: IBuildModuleInfo) => { modules.forEach((m: IBuildModuleInfo) => {
modulesMap[m.id] = m; modulesMap[m.id] = m;
}); });
const modulesGraph: IGraph = {}; let modulesGraph: IGraph = {};
modules.forEach((m: IBuildModuleInfo) => { modules.forEach((m: IBuildModuleInfo) => {
modulesGraph[m.id] = m.dependencies; modulesGraph[m.id] = m.dependencies;
}); });
const sortedModules = topologicalSort(modulesGraph); let sortedModules = topologicalSort(modulesGraph);
let result: IConcatFile[] = []; let result: IConcatFile[] = [];
const usedPlugins: IPluginMap = {}; let usedPlugins: IPluginMap = {};
const bundleData: IBundleData = { let bundleData: IBundleData = {
graph: modulesGraph, graph: modulesGraph,
bundles: {} bundles: {}
}; };
Object.keys(entryPoints).forEach((moduleToBundle: string) => { Object.keys(entryPoints).forEach((moduleToBundle: string) => {
const info = entryPoints[moduleToBundle]; let info = entryPoints[moduleToBundle];
const rootNodes = [moduleToBundle].concat(info.include || []); let rootNodes = [moduleToBundle].concat(info.include || []);
const allDependencies = visit(rootNodes, modulesGraph); let allDependencies = visit(rootNodes, modulesGraph);
const excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []); let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
excludes.forEach((excludeRoot: string) => { excludes.forEach((excludeRoot: string) => {
const allExcludes = visit([excludeRoot], modulesGraph); let allExcludes = visit([excludeRoot], modulesGraph);
Object.keys(allExcludes).forEach((exclude: string) => { Object.keys(allExcludes).forEach((exclude: string) => {
delete allDependencies[exclude]; delete allDependencies[exclude];
}); });
}); });
const includedModules = sortedModules.filter((module: string) => { let includedModules = sortedModules.filter((module: string) => {
return allDependencies[module]; return allDependencies[module];
}); });
bundleData.bundles[moduleToBundle] = includedModules; bundleData.bundles[moduleToBundle] = includedModules;
const res = emitEntryPoint( let res = emitEntryPoint(
modulesMap, modulesMap,
modulesGraph, modulesGraph,
moduleToBundle, moduleToBundle,
includedModules, includedModules,
info.prepend || [], info.prepend,
info.append || [], info.append,
info.dest info.dest
); );
result = result.concat(res.files); result = result.concat(res.files);
for (const pluginName in res.usedPlugins) { for (let pluginName in res.usedPlugins) {
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName]; usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
} }
}); });
Object.keys(usedPlugins).forEach((pluginName: string) => { Object.keys(usedPlugins).forEach((pluginName: string) => {
const plugin = usedPlugins[pluginName]; let plugin = usedPlugins[pluginName];
if (typeof plugin.finishBuild === 'function') { if (typeof plugin.finishBuild === 'function') {
const write = (filename: string, contents: string) => { let write = (filename: string, contents: string) => {
result.push({ result.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -241,16 +237,16 @@ function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMa
} }
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] { function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
const parseDefineCall = (moduleMatch: string, depsMatch: string) => { let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
const module = moduleMatch.replace(/^"|"$/g, ''); let module = moduleMatch.replace(/^"|"$/g, '');
let deps = depsMatch.split(','); let deps = depsMatch.split(',');
deps = deps.map((dep) => { deps = deps.map((dep) => {
dep = dep.trim(); dep = dep.trim();
dep = dep.replace(/^"|"$/g, ''); dep = dep.replace(/^"|"$/g, '');
dep = dep.replace(/^'|'$/g, ''); dep = dep.replace(/^'|'$/g, '');
let prefix: string | null = null; let prefix: string = null;
let _path: string | null = null; let _path: string = null;
const pieces = dep.split('!'); let pieces = dep.split('!');
if (pieces.length > 1) { if (pieces.length > 1) {
prefix = pieces[0] + '!'; prefix = pieces[0] + '!';
_path = pieces[1]; _path = pieces[1];
@@ -260,7 +256,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
} }
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) { if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/'); let res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
return prefix + res; return prefix + res;
} }
return prefix + _path; return prefix + _path;
@@ -271,7 +267,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
}; };
}; };
destFiles.forEach((destFile) => { destFiles.forEach((destFile, index) => {
if (!/\.js$/.test(destFile.dest)) { if (!/\.js$/.test(destFile.dest)) {
return; return;
} }
@@ -280,33 +276,33 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
} }
// Do one pass to record the usage counts for each module id // Do one pass to record the usage counts for each module id
const useCounts: { [moduleId: string]: number; } = {}; let useCounts: { [moduleId: string]: number; } = {};
destFile.sources.forEach((source) => { destFile.sources.forEach((source) => {
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/); let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
if (!matches) { if (!matches) {
return; return;
} }
const defineCall = parseDefineCall(matches[1], matches[2]); let defineCall = parseDefineCall(matches[1], matches[2]);
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1; useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
defineCall.deps.forEach((dep) => { defineCall.deps.forEach((dep) => {
useCounts[dep] = (useCounts[dep] || 0) + 1; useCounts[dep] = (useCounts[dep] || 0) + 1;
}); });
}); });
const sortedByUseModules = Object.keys(useCounts); let sortedByUseModules = Object.keys(useCounts);
sortedByUseModules.sort((a, b) => { sortedByUseModules.sort((a, b) => {
return useCounts[b] - useCounts[a]; return useCounts[b] - useCounts[a];
}); });
const replacementMap: { [moduleId: string]: number; } = {}; let replacementMap: { [moduleId: string]: number; } = {};
sortedByUseModules.forEach((module, index) => { sortedByUseModules.forEach((module, index) => {
replacementMap[module] = index; replacementMap[module] = index;
}); });
destFile.sources.forEach((source) => { destFile.sources.forEach((source) => {
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => { source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
const defineCall = parseDefineCall(moduleMatch, depsMatch); let defineCall = parseDefineCall(moduleMatch, depsMatch);
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`; return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
}); });
}); });
@@ -336,7 +332,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] { function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
// Taken from typescript compiler => emitFiles // Taken from typescript compiler => emitFiles
const BOILERPLATE = [ let BOILERPLATE = [
{ start: /^var __extends/, end: /^}\)\(\);$/ }, { start: /^var __extends/, end: /^}\)\(\);$/ },
{ start: /^var __assign/, end: /^};$/ }, { start: /^var __assign/, end: /^};$/ },
{ start: /^var __decorate/, end: /^};$/ }, { start: /^var __decorate/, end: /^};$/ },
@@ -347,22 +343,22 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
]; ];
destFiles.forEach((destFile) => { destFiles.forEach((destFile) => {
const SEEN_BOILERPLATE: boolean[] = []; let SEEN_BOILERPLATE = [];
destFile.sources.forEach((source) => { destFile.sources.forEach((source) => {
const lines = source.contents.split(/\r\n|\n|\r/); let lines = source.contents.split(/\r\n|\n|\r/);
const newLines: string[] = []; let newLines: string[] = [];
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp; let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
for (let i = 0; i < lines.length; i++) { for (let i = 0; i < lines.length; i++) {
const line = lines[i]; let line = lines[i];
if (IS_REMOVING_BOILERPLATE) { if (IS_REMOVING_BOILERPLATE) {
newLines.push(''); newLines.push('');
if (END_BOILERPLATE!.test(line)) { if (END_BOILERPLATE.test(line)) {
IS_REMOVING_BOILERPLATE = false; IS_REMOVING_BOILERPLATE = false;
} }
} else { } else {
for (let j = 0; j < BOILERPLATE.length; j++) { for (let j = 0; j < BOILERPLATE.length; j++) {
const boilerplate = BOILERPLATE[j]; let boilerplate = BOILERPLATE[j];
if (boilerplate.start.test(line)) { if (boilerplate.start.test(line)) {
if (SEEN_BOILERPLATE[j]) { if (SEEN_BOILERPLATE[j]) {
IS_REMOVING_BOILERPLATE = true; IS_REMOVING_BOILERPLATE = true;
@@ -402,19 +398,19 @@ function emitEntryPoint(
includedModules: string[], includedModules: string[],
prepend: string[], prepend: string[],
append: string[], append: string[],
dest: string | undefined dest: string
): IEmitEntryPointResult { ): IEmitEntryPointResult {
if (!dest) { if (!dest) {
dest = entryPoint + '.js'; dest = entryPoint + '.js';
} }
const mainResult: IConcatFile = { let mainResult: IConcatFile = {
sources: [], sources: [],
dest: dest dest: dest
}, },
results: IConcatFile[] = [mainResult]; results: IConcatFile[] = [mainResult];
const usedPlugins: IPluginMap = {}; let usedPlugins: IPluginMap = {};
const getLoaderPlugin = (pluginName: string): ILoaderPlugin => { let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
if (!usedPlugins[pluginName]) { if (!usedPlugins[pluginName]) {
usedPlugins[pluginName] = modulesMap[pluginName].exports; usedPlugins[pluginName] = modulesMap[pluginName].exports;
} }
@@ -422,39 +418,39 @@ function emitEntryPoint(
}; };
includedModules.forEach((c: string) => { includedModules.forEach((c: string) => {
const bangIndex = c.indexOf('!'); let bangIndex = c.indexOf('!');
if (bangIndex >= 0) { if (bangIndex >= 0) {
const pluginName = c.substr(0, bangIndex); let pluginName = c.substr(0, bangIndex);
const plugin = getLoaderPlugin(pluginName); let plugin = getLoaderPlugin(pluginName);
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1))); mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
return; return;
} }
const module = modulesMap[c]; let module = modulesMap[c];
if (module.path === 'empty:') { if (module.path === 'empty:') {
return; return;
} }
const contents = readFileAndRemoveBOM(module.path); let contents = readFileAndRemoveBOM(module.path);
if (module.shim) { if (module.shim) {
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents)); mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
} else { } else {
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents)); mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
} }
}); });
Object.keys(usedPlugins).forEach((pluginName: string) => { Object.keys(usedPlugins).forEach((pluginName: string) => {
const plugin = usedPlugins[pluginName]; let plugin = usedPlugins[pluginName];
if (typeof plugin.writeFile === 'function') { if (typeof plugin.writeFile === 'function') {
const req: ILoaderPluginReqFunc = <any>(() => { let req: ILoaderPluginReqFunc = <any>(() => {
throw new Error('no-no!'); throw new Error('no-no!');
}); });
req.toUrl = something => something; req.toUrl = something => something;
const write = (filename: string, contents: string) => { let write = (filename: string, contents: string) => {
results.push({ results.push({
dest: filename, dest: filename,
sources: [{ sources: [{
@@ -467,16 +463,16 @@ function emitEntryPoint(
} }
}); });
const toIFile = (path: string): IFile => { let toIFile = (path): IFile => {
const contents = readFileAndRemoveBOM(path); let contents = readFileAndRemoveBOM(path);
return { return {
path: path, path: path,
contents: contents contents: contents
}; };
}; };
const toPrepend = (prepend || []).map(toIFile); let toPrepend = (prepend || []).map(toIFile);
const toAppend = (append || []).map(toIFile); let toAppend = (append || []).map(toIFile);
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend); mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
@@ -487,8 +483,8 @@ function emitEntryPoint(
} }
function readFileAndRemoveBOM(path: string): string { function readFileAndRemoveBOM(path: string): string {
const BOM_CHAR_CODE = 65279; var BOM_CHAR_CODE = 65279;
let contents = fs.readFileSync(path, 'utf8'); var contents = fs.readFileSync(path, 'utf8');
// Remove BOM // Remove BOM
if (contents.charCodeAt(0) === BOM_CHAR_CODE) { if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
contents = contents.substring(1); contents = contents.substring(1);
@@ -499,7 +495,7 @@ function readFileAndRemoveBOM(path: string): string {
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile { function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
let result = ''; let result = '';
if (typeof plugin.write === 'function') { if (typeof plugin.write === 'function') {
const write: ILoaderPluginWriteFunc = <any>((what: string) => { let write: ILoaderPluginWriteFunc = <any>((what) => {
result += what; result += what;
}); });
write.getEntryPoint = () => { write.getEntryPoint = () => {
@@ -517,15 +513,15 @@ function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: strin
}; };
} }
function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path: string, contents: string): IFile { function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
// `defineCallPosition` is the position in code: |define() // `defineCallPosition` is the position in code: |define()
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col); let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
// `parensOffset` is the position in code: define|() // `parensOffset` is the position in code: define|()
const parensOffset = contents.indexOf('(', defineCallOffset); let parensOffset = contents.indexOf('(', defineCallOffset);
const insertStr = '"' + moduleId + '", '; let insertStr = '"' + moduleId + '", ';
return { return {
path: path, path: path,
@@ -534,8 +530,8 @@ function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path:
} }
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile { function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : ''); let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');'; let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
return { return {
path: path, path: path,
contents: contents + '\n;\n' + strDefine contents: contents + '\n;\n' + strDefine
@@ -550,8 +546,8 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
return desiredCol - 1; return desiredCol - 1;
} }
let line = 1; let line = 1,
let lastNewLineOffset = -1; lastNewLineOffset = -1;
do { do {
if (desiredLine === line) { if (desiredLine === line) {
@@ -569,16 +565,16 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
* Return a set of reachable nodes in `graph` starting from `rootNodes` * Return a set of reachable nodes in `graph` starting from `rootNodes`
*/ */
function visit(rootNodes: string[], graph: IGraph): INodeSet { function visit(rootNodes: string[], graph: IGraph): INodeSet {
const result: INodeSet = {}; let result: INodeSet = {},
const queue = rootNodes; queue = rootNodes;
rootNodes.forEach((node) => { rootNodes.forEach((node) => {
result[node] = true; result[node] = true;
}); });
while (queue.length > 0) { while (queue.length > 0) {
const el = queue.shift(); let el = queue.shift();
const myEdges = graph[el!] || []; let myEdges = graph[el] || [];
myEdges.forEach((toNode) => { myEdges.forEach((toNode) => {
if (!result[toNode]) { if (!result[toNode]) {
result[toNode] = true; result[toNode] = true;
@@ -595,7 +591,7 @@ function visit(rootNodes: string[], graph: IGraph): INodeSet {
*/ */
function topologicalSort(graph: IGraph): string[] { function topologicalSort(graph: IGraph): string[] {
const allNodes: INodeSet = {}, let allNodes: INodeSet = {},
outgoingEdgeCount: { [node: string]: number; } = {}, outgoingEdgeCount: { [node: string]: number; } = {},
inverseEdges: IGraph = {}; inverseEdges: IGraph = {};
@@ -613,7 +609,7 @@ function topologicalSort(graph: IGraph): string[] {
}); });
// https://en.wikipedia.org/wiki/Topological_sorting // https://en.wikipedia.org/wiki/Topological_sorting
const S: string[] = [], let S: string[] = [],
L: string[] = []; L: string[] = [];
Object.keys(allNodes).forEach((node: string) => { Object.keys(allNodes).forEach((node: string) => {
@@ -627,10 +623,10 @@ function topologicalSort(graph: IGraph): string[] {
// Ensure the exact same order all the time with the same inputs // Ensure the exact same order all the time with the same inputs
S.sort(); S.sort();
const n: string = S.shift()!; let n: string = S.shift();
L.push(n); L.push(n);
const myInverseEdges = inverseEdges[n] || []; let myInverseEdges = inverseEdges[n] || [];
myInverseEdges.forEach((m: string) => { myInverseEdges.forEach((m: string) => {
outgoingEdgeCount[m]--; outgoingEdgeCount[m]--;
if (outgoingEdgeCount[m] === 0) { if (outgoingEdgeCount[m] === 0) {

@@ -4,53 +4,44 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var gulp = require("gulp");
const fs = require("fs"); var tsb = require("gulp-tsb");
const gulp = require("gulp"); var es = require("event-stream");
const bom = require("gulp-bom"); var watch = require('./watch');
const sourcemaps = require("gulp-sourcemaps"); var nls = require("./nls");
const tsb = require("gulp-tsb"); var util = require("./util");
const path = require("path"); var reporter_1 = require("./reporter");
const _ = require("underscore"); var path = require("path");
const monacodts = require("../monaco/api"); var bom = require("gulp-bom");
const nls = require("./nls"); var sourcemaps = require("gulp-sourcemaps");
const reporter_1 = require("./reporter"); var _ = require("underscore");
const util = require("./util"); var monacodts = require("../monaco/api");
const util2 = require("gulp-util"); var fs = require("fs");
const watch = require('./watch'); var reporter = reporter_1.createReporter();
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) { function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`); var rootDir = path.join(__dirname, "../../" + src);
const tsconfig = require(`../../${src}/tsconfig.json`); var options = require("../../" + src + "/tsconfig.json").compilerOptions;
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
options.verbose = false; options.verbose = false;
options.sourceMap = true; options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false; options.sourceMap = false;
} }
options.rootDir = rootDir; options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir); options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF'; options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options; return options;
} }
function createCompile(src, build, emitError) { function createCompile(src, build, emitError) {
const opts = _.clone(getTypeScriptCompilerOptions(src)); var opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build; opts.inlineSources = !!build;
opts.noFilesystemLookup = true; opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString())); var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
return function (token) { return function (token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path)); var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
const tsFilter = util.filter(data => /\.ts$/.test(data.path)); var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path))); var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
const input = es.through(); var input = es.through();
const output = input var output = input
.pipe(utf8Filter) .pipe(utf8Filter)
.pipe(bom()) .pipe(bom())
.pipe(utf8Filter.restore) .pipe(utf8Filter.restore)
@@ -66,136 +57,91 @@ function createCompile(src, build, emitError) {
sourceRoot: opts.sourceRoot sourceRoot: opts.sourceRoot
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError)); .pipe(reporter.end(emitError));
return es.duplex(input, output); return es.duplex(input, output);
}; };
} }
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) { function compileTask(src, out, build) {
return function () { return function () {
const compile = createCompile(src, build, true); var compile = createCompile(src, build, true);
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts)); var srcPipe = es.merge(gulp.src(src + "/**", { base: "" + src }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
let generator = new MonacoGenerator(false); // Do not write .d.ts files to disk, as they are not needed there.
if (src === 'src') { var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
generator.execute();
}
return srcPipe return srcPipe
.pipe(generator.stream)
.pipe(compile()) .pipe(compile())
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
}; };
} }
exports.compileTask = compileTask; exports.compileTask = compileTask;
function watchTask(out, build) { function watchTask(out, build) {
return function () { return function () {
const compile = createCompile('src', build); var compile = createCompile('src', build);
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts)); var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
const watchSrc = watch('src/**', { base: 'src' }); var watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true); // Do not write .d.ts files to disk, as they are not needed there.
generator.execute(); var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
return watchSrc return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true)) .pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
}; };
} }
exports.watchTask = watchTask; exports.watchTask = watchTask;
const REPO_SRC_FOLDER = path.join(__dirname, '../../src'); function monacodtsTask(out, isWatch) {
class MonacoGenerator { var basePath = path.resolve(process.cwd(), out);
constructor(isWatch) { var neededFiles = {};
this._executeSoonTimer = null; monacodts.getFilesToWatch(out).forEach(function (filePath) {
this._isWatch = isWatch; filePath = path.normalize(filePath);
this.stream = es.through(); neededFiles[filePath] = true;
this._watchers = []; });
this._watchedFiles = {}; var inputFiles = {};
let onWillReadFile = (moduleId, filePath) => { for (var filePath in neededFiles) {
if (!this._isWatch) { if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
return; // This file is needed from source => simply read it now
} inputFiles[filePath] = fs.readFileSync(filePath).toString();
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
readFileSync(moduleId, filePath) {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
} }
} }
_executeSoon() { var setInputFile = function (filePath, contents) {
if (this._executeSoonTimer !== null) { if (inputFiles[filePath] === contents) {
clearTimeout(this._executeSoonTimer); // no change
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}
dispose() {
this._watchers.forEach(watcher => watcher.close());
}
_run() {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
_log(message, ...rest) {
util2.log(util2.colors.cyan('[monaco.d.ts]'), message, ...rest);
}
execute() {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
return; return;
} }
if (result.isTheSame) { inputFiles[filePath] = contents;
return; var neededInputFilesCount = Object.keys(neededFiles).length;
var availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
} }
fs.writeFileSync(result.filePath, result.content); };
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums); var run = function () {
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`); var result = monacodts.run(out, inputFiles);
if (!this._isWatch) { if (!result.isTheSame) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.'); if (isWatch) {
fs.writeFileSync(result.filePath, result.content);
}
else {
fs.writeFileSync(result.filePath, result.content);
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
} }
};
var resultStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
} }
resultStream = es.through(function (data) {
var filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
} }

@@ -5,39 +5,31 @@
'use strict'; 'use strict';
import * as es from 'event-stream';
import * as fs from 'fs';
import * as gulp from 'gulp'; import * as gulp from 'gulp';
import * as tsb from 'gulp-tsb';
import * as es from 'event-stream';
const watch = require('./watch');
import * as nls from './nls';
import * as util from './util';
import { createReporter } from './reporter';
import * as path from 'path';
import * as bom from 'gulp-bom'; import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps'; import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore'; import * as _ from 'underscore';
import * as monacodts from '../monaco/api'; import * as monacodts from '../monaco/api';
import * as nls from './nls'; import * as fs from 'fs';
import { createReporter } from './reporter';
import * as util from './util';
import * as util2 from 'gulp-util';
const watch = require('./watch');
const reporter = createReporter(); const reporter = createReporter();
function getTypeScriptCompilerOptions(src: string) { function getTypeScriptCompilerOptions(src: string) {
const rootDir = path.join(__dirname, `../../${src}`); const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`); const options = require(`../../${src}/tsconfig.json`).compilerOptions;
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
options.verbose = false; options.verbose = false;
options.sourceMap = true; options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
options.sourceMap = false; options.sourceMap = false;
} }
options.rootDir = rootDir; options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir); options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF'; options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
return options; return options;
@@ -48,7 +40,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
opts.inlineSources = !!build; opts.inlineSources = !!build;
opts.noFilesystemLookup = true; opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString())); const ts = tsb.create(opts, null, null, err => reporter(err.toString()));
return function (token?: util.ICancellationToken) { return function (token?: util.ICancellationToken) {
@@ -73,19 +65,12 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
sourceRoot: opts.sourceRoot sourceRoot: opts.sourceRoot
})) }))
.pipe(tsFilter.restore) .pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError)); .pipe(reporter.end(emitError));
return es.duplex(input, output); return es.duplex(input, output);
}; };
} }
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream { export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
return function () { return function () {
@@ -93,18 +78,18 @@ export function compileTask(src: string, out: string, build: boolean): () => Nod
const srcPipe = es.merge( const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts), gulp.src('node_modules/typescript/lib/lib.d.ts'),
); );
let generator = new MonacoGenerator(false); // Do not write .d.ts files to disk, as they are not needed there.
if (src === 'src') { const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
generator.execute();
}
return srcPipe return srcPipe
.pipe(generator.stream)
.pipe(compile()) .pipe(compile())
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
}; };
} }
@@ -115,128 +100,80 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
const src = es.merge( const src = es.merge(
gulp.src('src/**', { base: 'src' }), gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts), gulp.src('node_modules/typescript/lib/lib.d.ts'),
); );
const watchSrc = watch('src/**', { base: 'src' }); const watchSrc = watch('src/**', { base: 'src' });
let generator = new MonacoGenerator(true); // Do not write .d.ts files to disk, as they are not needed there.
generator.execute(); const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
return watchSrc return watchSrc
.pipe(generator.stream)
.pipe(util.incremental(compile, src, true)) .pipe(util.incremental(compile, src, true))
.pipe(gulp.dest(out)); .pipe(dtsFilter)
.pipe(gulp.dest(out))
.pipe(dtsFilter.restore)
.pipe(monacodtsTask(out, true));
}; };
} }
const REPO_SRC_FOLDER = path.join(__dirname, '../../src'); function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
class MonacoGenerator { const basePath = path.resolve(process.cwd(), out);
private readonly _isWatch: boolean;
public readonly stream: NodeJS.ReadWriteStream;
private readonly _watchers: fs.FSWatcher[]; const neededFiles: { [file: string]: boolean; } = {};
private readonly _watchedFiles: { [filePath: string]: boolean; }; monacodts.getFilesToWatch(out).forEach(function (filePath) {
private readonly _fsProvider: monacodts.FSProvider; filePath = path.normalize(filePath);
private readonly _declarationResolver: monacodts.DeclarationResolver; neededFiles[filePath] = true;
});
constructor(isWatch: boolean) { const inputFiles: { [file: string]: string; } = {};
this._isWatch = isWatch; for (let filePath in neededFiles) {
this.stream = es.through(); if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
this._watchers = []; // This file is needed from source => simply read it now
this._watchedFiles = {}; inputFiles[filePath] = fs.readFileSync(filePath).toString();
let onWillReadFile = (moduleId: string, filePath: string) => {
if (!this._isWatch) {
return;
}
if (this._watchedFiles[filePath]) {
return;
}
this._watchedFiles[filePath] = true;
const watcher = fs.watch(filePath);
watcher.addListener('change', () => {
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
watcher.addListener('error', (err) => {
console.error(`Encountered error while watching ${filePath}.`);
console.log(err);
delete this._watchedFiles[filePath];
for (let i = 0; i < this._watchers.length; i++) {
if (this._watchers[i] === watcher) {
this._watchers.splice(i, 1);
break;
}
}
watcher.close();
this._declarationResolver.invalidateCache(moduleId);
this._executeSoon();
});
this._watchers.push(watcher);
};
this._fsProvider = new class extends monacodts.FSProvider {
public readFileSync(moduleId: string, filePath: string): Buffer {
onWillReadFile(moduleId, filePath);
return super.readFileSync(moduleId, filePath);
}
};
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
if (this._isWatch) {
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
recipeWatcher.addListener('change', () => {
this._executeSoon();
});
this._watchers.push(recipeWatcher);
} }
} }
private _executeSoonTimer: NodeJS.Timer | null = null; const setInputFile = (filePath: string, contents: string) => {
private _executeSoon(): void { if (inputFiles[filePath] === contents) {
if (this._executeSoonTimer !== null) { // no change
clearTimeout(this._executeSoonTimer);
this._executeSoonTimer = null;
}
this._executeSoonTimer = setTimeout(() => {
this._executeSoonTimer = null;
this.execute();
}, 20);
}
public dispose(): void {
this._watchers.forEach(watcher => watcher.close());
}
private _run(): monacodts.IMonacoDeclarationResult | null {
let r = monacodts.run3(this._declarationResolver);
if (!r && !this._isWatch) {
// The build must always be able to generate the monaco.d.ts
throw new Error(`monaco.d.ts generation error - Cannot continue`);
}
return r;
}
private _log(message: any, ...rest: any[]): void {
util2.log(util2.colors.cyan('[monaco.d.ts]'), message, ...rest);
}
public execute(): void {
const startTime = Date.now();
const result = this._run();
if (!result) {
// nothing really changed
return; return;
} }
if (result.isTheSame) { inputFiles[filePath] = contents;
return; const neededInputFilesCount = Object.keys(neededFiles).length;
const availableInputFilesCount = Object.keys(inputFiles).length;
if (neededInputFilesCount === availableInputFilesCount) {
run();
} }
};
fs.writeFileSync(result.filePath, result.content); const run = () => {
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums); const result = monacodts.run(out, inputFiles);
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`); if (!result.isTheSame) {
if (!this._isWatch) { if (isWatch) {
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.'); fs.writeFileSync(result.filePath, result.content);
} else {
fs.writeFileSync(result.filePath, result.content);
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
}
} }
};
let resultStream: NodeJS.ReadWriteStream;
if (isWatch) {
watch('build/monaco/*').pipe(es.through(function () {
run();
}));
} }
resultStream = es.through(function (data) {
const filePath = path.normalize(path.resolve(basePath, data.relative));
if (neededFiles[filePath]) {
setInputFile(filePath, data.contents.toString());
}
this.emit('data', data);
});
return resultStream;
} }
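
The exported compileTask and watchTask factories above are meant to be wired up as gulp tasks by the build's gulpfile. A minimal sketch of that wiring, assuming gulp-style registration; the task names, import path and output folder are illustrative placeholders, not taken from this diff:

import * as gulp from 'gulp';
import { compileTask, watchTask } from './lib/compilation'; // import path is an assumption for the example

// One-shot compile of ./src into ./out (non-build flavour, source maps kept).
gulp.task('compile-client', compileTask('src', 'out', false));
// Incremental watch build writing into the same output folder.
gulp.task('watch-client', watchTask('out', false));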


@@ -11,7 +11,6 @@ const root = path.dirname(path.dirname(__dirname));
function getElectronVersion() { function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8'); const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1]; const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target; return target;
@@ -20,7 +19,6 @@ function getElectronVersion() {
module.exports.getElectronVersion = getElectronVersion; module.exports.getElectronVersion = getElectronVersion;
// returns 0 if the right version of electron is in .build/electron // returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) { if (require.main === module) {
const version = getElectronVersion(); const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version'); const versionFile = path.join(root, '.build', 'electron', 'version');
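
getElectronVersion derives the Electron version from the repository's .yarnrc by matching its target line. A small sketch of the file shape that the /^target "(.*)"$/m regex expects; the field values below are made-up placeholders, not the versions this repository actually pins:

// Illustrative .yarnrc contents; only the `target` line matters to getElectronVersion().
const sampleYarnrc = [
	'disturl "https://atom.io/download/electron"',
	'target "3.0.0"',
	'runtime "electron"'
].join('\n');

const match = /^target "(.*)"$/m.exec(sampleYarnrc);
const electronVersion = match ? match[1] : undefined; // -> "3.0.0"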


@@ -4,317 +4,116 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var es = require("event-stream");
const fs = require("fs"); var assign = require("object-assign");
const glob = require("glob"); var remote = require("gulp-remote-src");
const gulp = require("gulp"); var flatmap = require('gulp-flatmap');
const path = require("path"); var vzip = require('gulp-vinyl-zip');
const File = require("vinyl"); var filter = require('gulp-filter');
const vsce = require("vsce"); var rename = require('gulp-rename');
const stats_1 = require("./stats"); var util = require('gulp-util');
const util2 = require("./util"); var buffer = require('gulp-buffer');
const remote = require("gulp-remote-src"); var json = require('gulp-json-editor');
const vzip = require('gulp-vinyl-zip'); var fs = require("fs");
const filter = require("gulp-filter"); var path = require("path");
const rename = require("gulp-rename"); var vsce = require("vsce");
const util = require('gulp-util'); var File = require("vinyl");
const buffer = require('gulp-buffer'); function fromLocal(extensionPath) {
const json = require("gulp-json-editor"); var result = es.through();
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const root = path.resolve(path.join(__dirname, '..', '..'));
// {{SQL CARBON EDIT}}
const _ = require("underscore");
const vfs = require("vinyl-fs");
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
exports.packageBuiltInExtensions = packageBuiltInExtensions;
function packageExtensionTask(extensionName, platform, arch) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
}
else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
exports.packageExtensionTask = packageExtensionTask;
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath, sourceMappingURLBase) {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
}
else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
const result = es.through();
const packagedDependencies = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err, stats) => {
util.log(`Bundled extension: ${util.colors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = data.contents.toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn }) vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => { .then(function (fileNames) {
const files = fileNames var files = fileNames
.map(fileName => path.join(extensionPath, fileName)) .map(function (fileName) { return path.join(extensionPath, fileName); })
.map(filePath => new File({ .map(function (filePath) { return new File({
path: filePath, path: filePath,
stat: fs.statSync(filePath), stat: fs.statSync(filePath),
base: extensionPath, base: extensionPath,
contents: fs.createReadStream(filePath) contents: fs.createReadStream(filePath)
})); }); });
es.readArray(files).pipe(result); es.readArray(files).pipe(result);
}) })
.catch(err => result.emit('error', err)); .catch(function (err) { return result.emit('error', err); });
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath))); return result;
} }
const baseHeaders = { exports.fromLocal = fromLocal;
function error(err) {
var result = es.through();
setTimeout(function () { return result.emit('error', err); });
return result;
}
var baseHeaders = {
'X-Market-Client-Id': 'VSCode Build', 'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build', 'User-Agent': 'VSCode Build',
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2', 'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
}; };
function fromMarketplace(extensionName, version, metadata) { function fromMarketplace(extensionName, version) {
const [publisher, name] = extensionName.split('.'); var filterType = 7;
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`; var value = extensionName;
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...'); var criterium = { filterType: filterType, value: value };
const options = { var criteria = [criterium];
base: url, var pageNumber = 1;
var pageSize = 1;
var sortBy = 0;
var sortOrder = 0;
var flags = 0x1 | 0x2 | 0x80;
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
var headers = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
var options = {
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: { requestOptions: {
method: 'POST',
gzip: true, gzip: true,
headers: baseHeaders headers: headers,
body: body
} }
}; };
const packageJsonFilter = filter('package.json', { restore: true }); return remote('/extensionquery', options)
return remote('', options) .pipe(flatmap(function (stream, f) {
.pipe(vzip.src()) var rawResult = f.contents.toString('utf8');
.pipe(filter('extension/**')) var result = JSON.parse(rawResult);
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, ''))) var extension = result.results[0].extensions[0];
.pipe(packageJsonFilter) if (!extension) {
.pipe(buffer()) return error("No such extension: " + extension);
.pipe(json({ __metadata: metadata })) }
.pipe(packageJsonFilter.restore); var metadata = {
id: extension.extensionId,
publisherId: extension.publisher,
publisherDisplayName: extension.publisher.displayName
};
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
if (!extensionVersion) {
return error("No such extension version: " + extensionName + " @ " + version);
}
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
if (!asset) {
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
}
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
var options = {
base: asset.source,
requestOptions: {
gzip: true,
headers: baseHeaders
}
};
return remote('', options)
.pipe(flatmap(function (stream) {
var packageJsonFilter = filter('package.json', { restore: true });
return stream
.pipe(vzip.src())
.pipe(filter('extension/**'))
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json({ __metadata: metadata }))
.pipe(packageJsonFilter.restore);
}));
}));
} }
exports.fromMarketplace = fromMarketplace; exports.fromMarketplace = fromMarketplace;
const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
// {{SQL CARBON EDIT}}
'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent',
'import',
'profiler',
'admin-pack',
'big-data-cluster',
'dacpac'
];
var azureExtensions = ['azurecore', 'mssql'];
const builtInExtensions = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
 * marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders) {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
}
else {
const fn = streamProviders.shift();
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
function packageExtensionsStream(optsIn) {
const opts = optsIn || {};
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d) => path.relative(root, d.path)).map((d) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util2.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util2.cleanNodeModule('typescript', ['**/**'], undefined));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies,])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
}
exports.packageExtensionsStream = packageExtensionsStream;
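
The sequence() helper introduced above exists for the reason spelled out in its doc comment: running every extension stream in parallel starved the marketplace downloads, so the streams are now consumed one factory at a time. A minimal sketch of how it is consumed inside this module; the two globs and the destination folder are placeholders:

import * as gulp from 'gulp';

// sequence() shifts one factory at a time off the array and only starts the
// next stream once the previous one has emitted 'end'.
const combined = sequence([
	() => gulp.src('extensions/foo/**', { base: '.' }),
	() => gulp.src('extensions/bar/**', { base: '.' })
]);
combined.pipe(gulp.dest('out-extensions')); // destination is a placeholder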


@@ -4,221 +4,22 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
import * as es from 'event-stream'; import * as es from 'event-stream';
import * as fs from 'fs';
import * as glob from 'glob';
import * as gulp from 'gulp';
import * as path from 'path';
import { Stream } from 'stream'; import { Stream } from 'stream';
import * as File from 'vinyl'; import assign = require('object-assign');
import * as vsce from 'vsce';
import { createStatsStream } from './stats';
import * as util2 from './util';
import remote = require('gulp-remote-src'); import remote = require('gulp-remote-src');
const flatmap = require('gulp-flatmap');
const vzip = require('gulp-vinyl-zip'); const vzip = require('gulp-vinyl-zip');
import filter = require('gulp-filter'); const filter = require('gulp-filter');
import rename = require('gulp-rename'); const rename = require('gulp-rename');
const util = require('gulp-util'); const util = require('gulp-util');
const buffer = require('gulp-buffer'); const buffer = require('gulp-buffer');
import json = require('gulp-json-editor'); const json = require('gulp-json-editor');
const webpack = require('webpack'); import * as fs from 'fs';
const webpackGulp = require('webpack-stream'); import * as path from 'path';
import * as vsce from 'vsce';
import * as File from 'vinyl';
const root = path.resolve(path.join(__dirname, '..', '..')); export function fromLocal(extensionPath: string): Stream {
// {{SQL CARBON EDIT}}
import * as _ from 'underscore';
import * as vfs from 'vinyl-fs';
const deps = require('../dependencies');
const extensionsRoot = path.join(root, 'extensions');
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
export function packageBuiltInExtensions() {
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});
}
export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
if (platform === 'darwin') {
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
} else {
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
}
platform = platform || process.platform;
return () => {
const root = path.resolve(path.join(__dirname, '../..'));
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => extensionName === name);
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
return fromLocal(extension.path);
}));
let result = localExtensions
.pipe(util2.skipDirectories())
.pipe(util2.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
return result.pipe(vfs.dest(destination));
};
}
// {{SQL CARBON EDIT}} - End
function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
if (fs.existsSync(webpackFilename)) {
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
} else {
return fromLocalNormal(extensionPath);
}
}
function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
const result = es.through();
const packagedDependencies: string[] = [];
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
if (packageJsonConfig.dependencies) {
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
for (const key in webpackRootConfig.externals) {
if (key in packageJsonConfig.dependencies) {
packagedDependencies.push(key);
}
}
}
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
const filesStream = es.readArray(files);
// check for webpack configuration files, then invoke webpack
// and merge its output with the files stream. also rewrite the package.json
// file to a new entry point
const webpackConfigLocations = (<string[]>glob.sync(
path.join(extensionPath, '/**/extension.webpack.config.js'),
{ ignore: ['**/node_modules'] }
));
const packageJsonFilter = filter(f => {
if (path.basename(f.path) === 'package.json') {
// only modify package.json's next to the webpack file.
// to be safe, use existsSync instead of path comparison.
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
}
return false;
}, { restore: true });
const patchFilesStream = filesStream
.pipe(packageJsonFilter)
.pipe(buffer())
.pipe(json((data: any) => {
if (data.main) {
// hardcoded entry point directory!
data.main = data.main.replace('/out/', /dist/);
}
return data;
}))
.pipe(packageJsonFilter.restore);
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
const webpackDone = (err: any, stats: any) => {
util.log(`Bundled extension: ${util.colors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
if (err) {
result.emit('error', err);
}
const { compilation } = stats;
if (compilation.errors.length > 0) {
result.emit('error', compilation.errors.join('\n'));
}
if (compilation.warnings.length > 0) {
result.emit('error', compilation.warnings.join('\n'));
}
};
const webpackConfig = {
...require(webpackConfigPath),
...{ mode: 'production' }
};
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
return webpackGulp(webpackConfig, webpack, webpackDone)
.pipe(es.through(function (data) {
data.stat = data.stat || {};
data.base = extensionPath;
this.emit('data', data);
}))
.pipe(es.through(function (data: File) {
// source map handling:
// * rewrite sourceMappingURL
// * save to disk so that upload-task picks this up
if (sourceMappingURLBase) {
const contents = (<Buffer>data.contents).toString('utf8');
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
}), 'utf8');
if (/\.js\.map$/.test(data.path)) {
if (!fs.existsSync(path.dirname(data.path))) {
fs.mkdirSync(path.dirname(data.path));
}
fs.writeFileSync(data.path, data.contents);
}
}
this.emit('data', data);
}));
});
es.merge(sequence(webpackStreams), patchFilesStream)
// .pipe(es.through(function (data) {
// // debug
// console.log('out', data.path, data.contents.length);
// this.emit('data', data);
// }))
.pipe(result);
}).catch(err => {
console.error(extensionPath);
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath: string): Stream {
const result = es.through(); const result = es.through();
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn }) vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
@@ -236,7 +37,13 @@ function fromLocalNormal(extensionPath: string): Stream {
}) })
.catch(err => result.emit('error', err)); .catch(err => result.emit('error', err));
return result.pipe(createStatsStream(path.basename(extensionPath))); return result;
}
function error(err: any): Stream {
const result = es.through();
setTimeout(() => result.emit('error', err));
return result;
} }
const baseHeaders = { const baseHeaders = {
@@ -245,141 +52,82 @@ const baseHeaders = {
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2', 'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
}; };
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream { export function fromMarketplace(extensionName: string, version: string): Stream {
const [publisher, name] = extensionName.split('.'); const filterType = 7;
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`; const value = extensionName;
const criterium = { filterType, value };
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...'); const criteria = [criterium];
const pageNumber = 1;
const pageSize = 1;
const sortBy = 0;
const sortOrder = 0;
const flags = 0x1 | 0x2 | 0x80;
const assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
const filters = [{ criteria, pageNumber, pageSize, sortBy, sortOrder }];
const body = JSON.stringify({ filters, assetTypes, flags });
const headers: any = assign({}, baseHeaders, {
'Content-Type': 'application/json',
'Accept': 'application/json;api-version=3.0-preview.1',
'Content-Length': body.length
});
const options = { const options = {
base: url, base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
requestOptions: { requestOptions: {
method: 'POST',
gzip: true, gzip: true,
headers: baseHeaders headers,
body: body
} }
}; };
const packageJsonFilter = filter('package.json', { restore: true }); return remote('/extensionquery', options)
.pipe(flatmap((stream, f) => {
const rawResult = f.contents.toString('utf8');
const result = JSON.parse(rawResult);
const extension = result.results[0].extensions[0];
if (!extension) {
return error(`No such extension: ${extension}`);
}
return remote('', options) const metadata = {
.pipe(vzip.src()) id: extension.extensionId,
.pipe(filter('extension/**')) publisherId: extension.publisher,
.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, ''))) publisherDisplayName: extension.publisher.displayName
.pipe(packageJsonFilter) };
.pipe(buffer())
.pipe(json({ __metadata: metadata })) const extensionVersion = extension.versions.filter(v => v.version === version)[0];
.pipe(packageJsonFilter.restore); if (!extensionVersion) {
} return error(`No such extension version: ${extensionName} @ ${version}`);
}
interface IPackageExtensionsOptions {
/** const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
* Set to undefined to package all of them. if (!asset) {
*/ return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
desiredExtensions?: string[]; }
sourceMappingURLBase?: string;
} util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
const excludedExtensions = [ const options = {
'vscode-api-tests', base: asset.source,
'vscode-colorize-tests', requestOptions: {
'ms-vscode.node-debug', gzip: true,
'ms-vscode.node-debug2', headers: baseHeaders
// {{SQL CARBON EDIT}} }
'integration-tests' };
];
return remote('', options)
// {{SQL CARBON EDIT}} .pipe(flatmap(stream => {
const sqlBuiltInExtensions = [ const packageJsonFilter = filter('package.json', { restore: true });
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages return stream
'agent', .pipe(vzip.src())
'import', .pipe(filter('extension/**'))
'profiler', .pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
'admin-pack', .pipe(packageJsonFilter)
'big-data-cluster', .pipe(buffer())
'dacpac' .pipe(json({ __metadata: metadata }))
]; .pipe(packageJsonFilter.restore);
var azureExtensions = ['azurecore', 'mssql']; }));
// {{SQL CARBON EDIT}} - End }));
interface IBuiltInExtension {
name: string;
version: string;
repo: string;
metadata: any;
}
const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');
/**
* We're doing way too much stuff at once, with webpack et al. So much stuff
* that while downloading extensions from the marketplace, node js doesn't get enough
* stack frames to complete the download in under 2 minutes, at which point the
 * marketplace server cuts off the http request. So, we sequentialize the extension tasks.
*/
function sequence(streamProviders: { (): Stream }[]): Stream {
const result = es.through();
function pop() {
if (streamProviders.length === 0) {
result.emit('end');
} else {
const fn = streamProviders.shift()!;
fn()
.on('end', function () { setTimeout(pop, 0); })
.pipe(result, { end: false });
}
}
pop();
return result;
}
export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
const opts = optsIn || {};
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
// {{SQL CARBON EDIT}}
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
return fromLocal(extension.path, opts.sourceMappingURLBase)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
})]);
// {{SQL CARBON EDIT}}
const extensionDepsSrc = [
..._.flatten(extensionsProductionDependencies.map((d: any) => path.relative(root, d.path)).map((d: any) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
];
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util2.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
.pipe(util2.cleanNodeModule('typescript', ['**/**'], undefined));
// Original code commented out here
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
// const marketplaceExtensions = () => es.merge(
// ...builtInExtensions
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
// .map(extension => {
// return fromMarketplace(extension.name, extension.version, extension.metadata)
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
// })
// );
return sequence([localExtensions, localExtensionDependencies, /*marketplaceExtensions*/])
.pipe(util2.setExecutableBit(['**/*.sh']))
.pipe(filter(['**', '!**/*.js.map']));
// {{SQL CARBON EDIT}} - End
} }
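
fromMarketplace() now downloads the VSIX straight from the publisher/name/version vspackage URL, unzips it, strips the extension/ prefix and stamps __metadata into package.json. A short usage sketch modelled on the commented-out marketplaceExtensions block above; the extension id, version, metadata and destination folder are placeholders:

import * as gulp from 'gulp';
import { fromMarketplace } from './lib/extensions'; // import path is an assumption

fromMarketplace('ms-vscode.node-debug2', '1.26.5', { publisherDisplayName: 'Microsoft' })
	.pipe(gulp.dest('.build/builtInExtensions/ms-vscode.node-debug2'));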


@@ -4,47 +4,47 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path"); var path = require("path");
const fs = require("fs"); var fs = require("fs");
/** /**
* Returns the sha1 commit version of a repository or undefined in case of failure. * Returns the sha1 commit version of a repository or undefined in case of failure.
*/ */
function getVersion(repo) { function getVersion(repo) {
const git = path.join(repo, '.git'); var git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD'); var headPath = path.join(git, 'HEAD');
let head; var head;
try { try {
head = fs.readFileSync(headPath, 'utf8').trim(); head = fs.readFileSync(headPath, 'utf8').trim();
} }
catch (e) { catch (e) {
return undefined; return void 0;
} }
if (/^[0-9a-f]{40}$/i.test(head)) { if (/^[0-9a-f]{40}$/i.test(head)) {
return head; return head;
} }
const refMatch = /^ref: (.*)$/.exec(head); var refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) { if (!refMatch) {
return undefined; return void 0;
} }
const ref = refMatch[1]; var ref = refMatch[1];
const refPath = path.join(git, ref); var refPath = path.join(git, ref);
try { try {
return fs.readFileSync(refPath, 'utf8').trim(); return fs.readFileSync(refPath, 'utf8').trim();
} }
catch (e) { catch (e) {
// noop // noop
} }
const packedRefsPath = path.join(git, 'packed-refs'); var packedRefsPath = path.join(git, 'packed-refs');
let refsRaw; var refsRaw;
try { try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim(); refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
} }
catch (e) { catch (e) {
return undefined; return void 0;
} }
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm; var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch; var refsMatch;
let refs = {}; var refs = {};
while (refsMatch = refsRegex.exec(refsRaw)) { while (refsMatch = refsRegex.exec(refsRaw)) {
refs[refsMatch[2]] = refsMatch[1]; refs[refsMatch[2]] = refsMatch[1];
} }


@@ -10,7 +10,7 @@ import * as fs from 'fs';
/** /**
* Returns the sha1 commit version of a repository or undefined in case of failure. * Returns the sha1 commit version of a repository or undefined in case of failure.
*/ */
export function getVersion(repo: string): string | undefined { export function getVersion(repo: string): string {
const git = path.join(repo, '.git'); const git = path.join(repo, '.git');
const headPath = path.join(git, 'HEAD'); const headPath = path.join(git, 'HEAD');
let head: string; let head: string;
@@ -18,7 +18,7 @@ export function getVersion(repo: string): string | undefined {
try { try {
head = fs.readFileSync(headPath, 'utf8').trim(); head = fs.readFileSync(headPath, 'utf8').trim();
} catch (e) { } catch (e) {
return undefined; return void 0;
} }
if (/^[0-9a-f]{40}$/i.test(head)) { if (/^[0-9a-f]{40}$/i.test(head)) {
@@ -28,7 +28,7 @@ export function getVersion(repo: string): string | undefined {
const refMatch = /^ref: (.*)$/.exec(head); const refMatch = /^ref: (.*)$/.exec(head);
if (!refMatch) { if (!refMatch) {
return undefined; return void 0;
} }
const ref = refMatch[1]; const ref = refMatch[1];
@@ -46,11 +46,11 @@ export function getVersion(repo: string): string | undefined {
try { try {
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim(); refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
} catch (e) { } catch (e) {
return undefined; return void 0;
} }
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm; const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
let refsMatch: RegExpExecArray | null; let refsMatch: RegExpExecArray;
let refs: { [ref: string]: string } = {}; let refs: { [ref: string]: string } = {};
while (refsMatch = refsRegex.exec(refsRaw)) { while (refsMatch = refsRegex.exec(refsRaw)) {
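
getVersion() resolves the current commit without shelling out to git: it reads .git/HEAD, follows a symbolic ref if one is present, and finally falls back to the packed-refs file. A minimal usage sketch, assuming the caller sits one directory below the repository root; the path and import location are assumptions for the example:

import * as path from 'path';
import { getVersion } from './lib/git'; // import path is an assumption

const repoRoot = path.resolve(__dirname, '..');
const commit = getVersion(repoRoot); // sha1 string, or undefined if it cannot be determined
console.log(commit || 'unknown commit');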

File diff suppressed because it is too large


@@ -78,6 +78,10 @@
"name": "vs/workbench/parts/logs", "name": "vs/workbench/parts/logs",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/navigation",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/parts/output", "name": "vs/workbench/parts/output",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -110,10 +114,6 @@
"name": "vs/workbench/parts/snippets", "name": "vs/workbench/parts/snippets",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/parts/stats",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/parts/surveys", "name": "vs/workbench/parts/surveys",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -166,10 +166,6 @@
"name": "vs/workbench/services/bulkEdit", "name": "vs/workbench/services/bulkEdit",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/commands",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/configuration", "name": "vs/workbench/services/configuration",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -214,10 +210,6 @@
"name": "vs/workbench/services/progress", "name": "vs/workbench/services/progress",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/remote",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/textfile", "name": "vs/workbench/services/textfile",
"project": "vscode-workbench" "project": "vscode-workbench"
@@ -238,10 +230,6 @@
"name": "vs/workbench/services/decorations", "name": "vs/workbench/services/decorations",
"project": "vscode-workbench" "project": "vscode-workbench"
}, },
{
"name": "vs/workbench/services/label",
"project": "vscode-workbench"
},
{ {
"name": "vs/workbench/services/preferences", "name": "vs/workbench/services/preferences",
"project": "vscode-preferences" "project": "vscode-preferences"


@@ -7,15 +7,15 @@ import * as path from 'path';
import * as fs from 'fs'; import * as fs from 'fs';
import { through, readable, ThroughStream } from 'event-stream'; import { through, readable, ThroughStream } from 'event-stream';
import * as File from 'vinyl'; import File = require('vinyl');
import * as Is from 'is'; import * as Is from 'is';
import * as xml2js from 'xml2js'; import * as xml2js from 'xml2js';
import * as glob from 'glob'; import * as glob from 'glob';
import * as https from 'https'; import * as https from 'https';
import * as gulp from 'gulp'; import * as gulp from 'gulp';
import * as util from 'gulp-util'; var util = require('gulp-util');
import * as iconv from 'iconv-lite'; var iconv = require('iconv-lite');
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4; const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
@@ -57,7 +57,7 @@ export const extraLanguages: Language[] = [
]; ];
// non built-in extensions also that are transifex and need to be part of the language packs // non built-in extensions also that are transifex and need to be part of the language packs
export const externalExtensionsWithTranslations = { const externalExtensionsWithTranslations = {
'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome', 'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
'vscode-node-debug': 'ms-vscode.node-debug', 'vscode-node-debug': 'ms-vscode.node-debug',
'vscode-node-debug2': 'ms-vscode.node-debug2' 'vscode-node-debug2': 'ms-vscode.node-debug2'
@@ -71,7 +71,7 @@ interface Map<V> {
interface Item { interface Item {
id: string; id: string;
message: string; message: string;
comment?: string; comment: string;
} }
export interface Resource { export interface Resource {
@@ -137,6 +137,27 @@ module PackageJsonFormat {
} }
} }
interface ModuleJsonFormat {
messages: string[];
keys: (string | LocalizeInfo)[];
}
module ModuleJsonFormat {
export function is(value: any): value is ModuleJsonFormat {
let candidate = value as ModuleJsonFormat;
return Is.defined(candidate)
&& Is.array(candidate.messages) && candidate.messages.every(message => Is.string(message))
&& Is.array(candidate.keys) && candidate.keys.every(key => Is.string(key) || LocalizeInfo.is(key));
}
}
interface BundledExtensionHeaderFormat {
id: string;
type: string;
hash: string;
outDir: string;
}
interface BundledExtensionFormat { interface BundledExtensionFormat {
[key: string]: { [key: string]: {
messages: string[]; messages: string[];
@@ -147,7 +168,7 @@ interface BundledExtensionFormat {
export class Line { export class Line {
private buffer: string[] = []; private buffer: string[] = [];
constructor(indent: number = 0) { constructor(private indent: number = 0) {
if (indent > 0) { if (indent > 0) {
this.buffer.push(new Array(indent + 1).join(' ')); this.buffer.push(new Array(indent + 1).join(' '));
} }
@@ -214,8 +235,8 @@ export class XLF {
let existingKeys = new Set<string>(); let existingKeys = new Set<string>();
for (let i = 0; i < keys.length; i++) { for (let i = 0; i < keys.length; i++) {
let key = keys[i]; let key = keys[i];
let realKey: string | undefined; let realKey: string;
let comment: string | undefined; let comment: string;
if (Is.string(key)) { if (Is.string(key)) {
realKey = key; realKey = key;
comment = undefined; comment = undefined;
@@ -265,17 +286,17 @@ export class XLF {
} }
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> { static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
return new Promise((resolve) => { return new Promise((resolve, reject) => {
let parser = new xml2js.Parser(); let parser = new xml2js.Parser();
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = []; let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
parser.parseString(xlfString, function (_err: any, result: any) { parser.parseString(xlfString, function (err, result) {
const fileNodes: any[] = result['xliff']['file']; const fileNodes: any[] = result['xliff']['file'];
fileNodes.forEach(file => { fileNodes.forEach(file => {
const originalFilePath = file.$.original; const originalFilePath = file.$.original;
const messages: Map<string> = {}; const messages: Map<string> = {};
const transUnits = file.body[0]['trans-unit']; const transUnits = file.body[0]['trans-unit'];
if (transUnits) { if (transUnits) {
transUnits.forEach((unit: any) => { transUnits.forEach(unit => {
const key = unit.$.id; const key = unit.$.id;
const val = pseudify(unit.source[0]['_'].toString()); const val = pseudify(unit.source[0]['_'].toString());
if (key && val) { if (key && val) {
@@ -296,7 +317,7 @@ export class XLF {
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = []; let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
parser.parseString(xlfString, function (err: any, result: any) { parser.parseString(xlfString, function (err, result) {
if (err) { if (err) {
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`)); reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
} }
@@ -319,20 +340,17 @@ export class XLF {
const transUnits = file.body[0]['trans-unit']; const transUnits = file.body[0]['trans-unit'];
if (transUnits) { if (transUnits) {
transUnits.forEach((unit: any) => { transUnits.forEach(unit => {
const key = unit.$.id; const key = unit.$.id;
if (!unit.target) { if (!unit.target) {
return; // No translation available return; // No translation available
} }
let val = unit.target[0]; const val = unit.target.toString();
if (typeof val !== 'string') {
val = val._;
}
if (key && val) { if (key && val) {
messages[key] = decodeEntities(val); messages[key] = decodeEntities(val);
} else { } else {
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`)); reject(new Error(`XLF parsing error: XLIFF file does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
} }
}); });
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() }); files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
@@ -351,7 +369,7 @@ export interface ITask<T> {
interface ILimitedTaskFactory<T> { interface ILimitedTaskFactory<T> {
factory: ITask<Promise<T>>; factory: ITask<Promise<T>>;
c: (value?: T | Promise<T>) => void; c: (value?: T | Thenable<T>) => void;
e: (error?: any) => void; e: (error?: any) => void;
} }
@@ -373,7 +391,7 @@ export class Limiter<T> {
private consume(): void { private consume(): void {
while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) { while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
const iLimitedTask = this.outstandingPromises.shift()!; const iLimitedTask = this.outstandingPromises.shift();
this.runningPromises++; this.runningPromises++;
const promise = iLimitedTask.factory(); const promise = iLimitedTask.factory();
@@ -401,8 +419,8 @@ function stripComments(content: string): string {
* Third matches block comments * Third matches block comments
* Fourth matches line comments * Fourth matches line comments
*/ */
const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g; var regexp: RegExp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => { let result = content.replace(regexp, (match, m1, m2, m3, m4) => {
// Only one of m1, m2, m3, m4 matches // Only one of m1, m2, m3, m4 matches
if (m3) { if (m3) {
// A block comment. Replace with nothing // A block comment. Replace with nothing
@@ -424,9 +442,9 @@ function stripComments(content: string): string {
} }
function escapeCharacters(value: string): string { function escapeCharacters(value: string): string {
const result: string[] = []; var result: string[] = [];
for (let i = 0; i < value.length; i++) { for (var i = 0; i < value.length; i++) {
const ch = value.charAt(i); var ch = value.charAt(i);
switch (ch) { switch (ch) {
case '\'': case '\'':
result.push('\\\''); result.push('\\\'');
@@ -466,6 +484,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
let statistics: Map<number> = Object.create(null); let statistics: Map<number> = Object.create(null);
let total: number = 0;
let defaultMessages: Map<Map<string>> = Object.create(null); let defaultMessages: Map<Map<string>> = Object.create(null);
let modules = Object.keys(keysSection); let modules = Object.keys(keysSection);
modules.forEach((module) => { modules.forEach((module) => {
@@ -478,6 +497,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
let messageMap: Map<string> = Object.create(null); let messageMap: Map<string> = Object.create(null);
defaultMessages[module] = messageMap; defaultMessages[module] = messageMap;
keys.map((key, i) => { keys.map((key, i) => {
total++;
if (typeof key === 'string') { if (typeof key === 'string') {
messageMap[key] = messages[i]; messageMap[key] = messages[i];
} else { } else {
@@ -500,7 +520,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
modules.forEach((module) => { modules.forEach((module) => {
let order = keysSection[module]; let order = keysSection[module];
let i18nFile = path.join(cwd, module) + '.i18n.json'; let i18nFile = path.join(cwd, module) + '.i18n.json';
let messages: Map<string> | null = null; let messages: Map<string> = null;
if (fs.existsSync(i18nFile)) { if (fs.existsSync(i18nFile)) {
let content = stripComments(fs.readFileSync(i18nFile, 'utf8')); let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
messages = JSON.parse(content); messages = JSON.parse(content);
@@ -513,13 +533,13 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
} }
let localizedMessages: string[] = []; let localizedMessages: string[] = [];
order.forEach((keyInfo) => { order.forEach((keyInfo) => {
let key: string | null = null; let key: string = null;
if (typeof keyInfo === 'string') { if (typeof keyInfo === 'string') {
key = keyInfo; key = keyInfo;
} else { } else {
key = keyInfo.key; key = keyInfo.key;
} }
let message: string = messages![key]; let message: string = messages[key];
if (!message) { if (!message) {
if (process.env['VSCODE_BUILD_VERBOSE']) { if (process.env['VSCODE_BUILD_VERBOSE']) {
log(`No localized message found for key ${key} in module ${module}. Using default message.`); log(`No localized message found for key ${key} in module ${module}. Using default message.`);
@@ -804,8 +824,8 @@ export function createXlfFilesForIsl(): ThroughStream {
} }
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream { export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
let tryGetPromises: Array<Promise<boolean>> = []; let tryGetPromises = [];
let updateCreatePromises: Array<Promise<boolean>> = []; let updateCreatePromises = [];
return through(function (this: ThroughStream, file: File) { return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative); const project = path.dirname(file.relative);
@@ -870,7 +890,7 @@ function getAllResources(project: string, apiHostname: string, username: string,
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream { export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
let resourcesByProject: Map<string[]> = Object.create(null); let resourcesByProject: Map<string[]> = Object.create(null);
resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone resourcesByProject[extensionsProject] = [].concat(externalExtensionsWithTranslations); // clone
return through(function (this: ThroughStream, file: File) { return through(function (this: ThroughStream, file: File) {
const project = path.dirname(file.relative); const project = path.dirname(file.relative);
@@ -887,7 +907,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8')); const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_')); let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
let extractedResources: string[] = []; let extractedResources = [];
for (let project of [workbenchProject, editorProject]) { for (let project of [workbenchProject, editorProject]) {
for (let resource of resourcesByProject[project]) { for (let resource of resourcesByProject[project]) {
if (resource !== 'setup_messages') { if (resource !== 'setup_messages') {
@@ -900,7 +920,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`); console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
} }
let promises: Array<Promise<void>> = []; let promises = [];
for (let project in resourcesByProject) { for (let project in resourcesByProject) {
promises.push( promises.push(
getAllResources(project, apiHostname, username, password).then(resources => { getAllResources(project, apiHostname, username, password).then(resources => {
@@ -945,7 +965,7 @@ function tryGetResource(project: string, slug: string, apiHostname: string, cred
} }
function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> { function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
return new Promise((_resolve, reject) => { return new Promise((resolve, reject) => {
const data = JSON.stringify({ const data = JSON.stringify({
'content': xlfFile.contents.toString(), 'content': xlfFile.contents.toString(),
'name': slug, 'name': slug,
@@ -1036,8 +1056,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
// extensions // extensions
let extensionsToLocalize = Object.create(null); let extensionsToLocalize = Object.create(null);
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/**/*.nls.json', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true); glob.sync('./extensions/*/node_modules/vscode-nls', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
Object.keys(extensionsToLocalize).forEach(extension => { Object.keys(extensionsToLocalize).forEach(extension => {
_coreAndExtensionResources.push({ name: extension, project: extensionsProject }); _coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1065,7 +1085,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
let expectedTranslationsCount = resources.length; let expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false; let translationsRetrieved = 0, called = false;
return readable(function (_count: any, callback: any) { return readable(function (count, callback) {
// Mark end of stream when all resources were retrieved // Mark end of stream when all resources were retrieved
if (translationsRetrieved === expectedTranslationsCount) { if (translationsRetrieved === expectedTranslationsCount) {
return this.emit('end'); return this.emit('end');
@@ -1075,7 +1095,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
called = true; called = true;
const stream = this; const stream = this;
resources.map(function (resource) { resources.map(function (resource) {
retrieveResource(language, resource, apiHostname, credentials).then((file: File | null) => { retrieveResource(language, resource, apiHostname, credentials).then((file: File) => {
if (file) { if (file) {
stream.emit('data', file); stream.emit('data', file);
} }
@@ -1087,10 +1107,10 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
callback(); callback();
}); });
} }
const limiter = new Limiter<File | null>(NUMBER_OF_CONCURRENT_DOWNLOADS); const limiter = new Limiter<File>(NUMBER_OF_CONCURRENT_DOWNLOADS);
function retrieveResource(language: Language, resource: Resource, apiHostname: string, credentials: string): Promise<File | null> { function retrieveResource(language: Language, resource: Resource, apiHostname, credentials): Promise<File> {
return limiter.queue(() => new Promise<File | null>((resolve, reject) => { return limiter.queue(() => new Promise<File>((resolve, reject) => {
const slug = resource.name.replace(/\//g, '_'); const slug = resource.name.replace(/\//g, '_');
const project = resource.project; const project = resource.project;
let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id; let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id;
@@ -1192,10 +1212,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
let parsePromises: Promise<ParsedXLF[]>[] = []; let parsePromises: Promise<ParsedXLF[]>[] = [];
let mainPack: I18nPack = { version: i18nPackVersion, contents: {} }; let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
let extensionsPacks: Map<I18nPack> = {}; let extensionsPacks: Map<I18nPack> = {};
let errors: any[] = [];
return through(function (this: ThroughStream, xlf: File) { return through(function (this: ThroughStream, xlf: File) {
let project = path.dirname(xlf.relative); let stream = this;
let resource = path.basename(xlf.relative, '.xlf'); let project = path.dirname(xlf.path);
let resource = path.basename(xlf.path, '.xlf');
let contents = xlf.contents.toString(); let contents = xlf.contents.toString();
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents); let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
parsePromises.push(parsePromise); parsePromises.push(parsePromise);
@@ -1222,15 +1242,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
} }
}); });
} }
).catch(reason => { );
errors.push(reason);
});
}, function () { }, function () {
Promise.all(parsePromises) Promise.all(parsePromises)
.then(() => { .then(() => {
if (errors.length > 0) {
throw errors;
}
const translatedMainFile = createI18nFile('./main', mainPack); const translatedMainFile = createI18nFile('./main', mainPack);
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' }); resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
@@ -1249,9 +1264,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
} }
this.queue(null); this.queue(null);
}) })
.catch((reason) => { .catch(reason => { throw new Error(reason); });
this.emit('error', reason);
});
}); });
} }
@@ -1272,15 +1285,11 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
stream.queue(translatedFile); stream.queue(translatedFile);
}); });
} }
).catch(reason => { );
this.emit('error', reason);
});
}, function () { }, function () {
Promise.all(parsePromises) Promise.all(parsePromises)
.then(() => { this.queue(null); }) .then(() => { this.queue(null); })
.catch(reason => { .catch(reason => { throw new Error(reason); });
this.emit('error', reason);
});
}); });
} }
@@ -1297,7 +1306,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
let firstChar = line.charAt(0); let firstChar = line.charAt(0);
if (firstChar === '[' || firstChar === ';') { if (firstChar === '[' || firstChar === ';') {
if (line === '; *** Inno Setup version 5.5.3+ English messages ***') { if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo!.name} messages ***`); content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo.name} messages ***`);
} else { } else {
content.push(line); content.push(line);
} }
@@ -1307,9 +1316,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
let translated = line; let translated = line;
if (key) { if (key) {
if (key === 'LanguageName') { if (key === 'LanguageName') {
translated = `${key}=${innoSetup.defaultInfo!.name}`; translated = `${key}=${innoSetup.defaultInfo.name}`;
} else if (key === 'LanguageID') { } else if (key === 'LanguageID') {
translated = `${key}=${innoSetup.defaultInfo!.id}`; translated = `${key}=${innoSetup.defaultInfo.id}`;
} else if (key === 'LanguageCodePage') { } else if (key === 'LanguageCodePage') {
translated = `${key}=${innoSetup.codePage.substr(2)}`; translated = `${key}=${innoSetup.codePage.substr(2)}`;
} else { } else {
@@ -1330,14 +1339,14 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
return new File({ return new File({
path: filePath, path: filePath,
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage) contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8'), innoSetup.codePage)
}); });
} }
function encodeEntities(value: string): string { function encodeEntities(value: string): string {
let result: string[] = []; var result: string[] = [];
for (let i = 0; i < value.length; i++) { for (var i = 0; i < value.length; i++) {
let ch = value[i]; var ch = value[i];
switch (ch) { switch (ch) {
case '<': case '<':
result.push('&lt;'); result.push('&lt;');

View File

@@ -3,12 +3,13 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
const ts = require("typescript"); var ts = require("typescript");
const lazy = require("lazy.js"); var lazy = require("lazy.js");
const event_stream_1 = require("event-stream"); var event_stream_1 = require("event-stream");
const File = require("vinyl"); var File = require("vinyl");
const sm = require("source-map"); var sm = require("source-map");
const path = require("path"); var assign = require("object-assign");
var path = require("path");
var CollectStepResult; var CollectStepResult;
(function (CollectStepResult) { (function (CollectStepResult) {
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes"; CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
@@ -17,9 +18,9 @@ var CollectStepResult;
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse"; CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
})(CollectStepResult || (CollectStepResult = {})); })(CollectStepResult || (CollectStepResult = {}));
function collect(node, fn) { function collect(node, fn) {
const result = []; var result = [];
function loop(node) { function loop(node) {
const stepResult = fn(node); var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) { if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node); result.push(node);
} }
@@ -31,45 +32,43 @@ function collect(node, fn) {
return result; return result;
} }
function clone(object) { function clone(object) {
const result = {}; var result = {};
for (const id in object) { for (var id in object) {
result[id] = object[id]; result[id] = object[id];
} }
return result; return result;
} }
function template(lines) { function template(lines) {
let indent = '', wrap = ''; var indent = '', wrap = '';
if (lines.length > 1) { if (lines.length > 1) {
indent = '\t'; indent = '\t';
wrap = '\n'; wrap = '\n';
} }
return `/*--------------------------------------------------------- return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
* Copyright (C) Microsoft Corporation. All rights reserved.
*--------------------------------------------------------*/
define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
} }
/** /**
* Returns a stream containing the patched JavaScript and source maps. * Returns a stream containing the patched JavaScript and source maps.
*/ */
function nls() { function nls() {
const input = event_stream_1.through(); var input = event_stream_1.through();
const output = input.pipe(event_stream_1.through(function (f) { var output = input.pipe(event_stream_1.through(function (f) {
var _this = this;
if (!f.sourceMap) { if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
} }
let source = f.sourceMap.sources[0]; var source = f.sourceMap.sources[0];
if (!source) { if (!source) {
return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`)); return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
} }
const root = f.sourceMap.sourceRoot; var root = f.sourceMap.sourceRoot;
if (root) { if (root) {
source = path.join(root, source); source = path.join(root, source);
} }
const typescript = f.sourceMap.sourcesContent[0]; var typescript = f.sourceMap.sourcesContent[0];
if (!typescript) { if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`)); return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
} }
nls.patchFiles(f, typescript).forEach(f => this.emit('data', f)); nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
})); }));
return event_stream_1.duplex(input, output); return event_stream_1.duplex(input, output);
} }
@@ -77,7 +76,8 @@ function isImportNode(node) {
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration; return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
} }
(function (nls_1) { (function (nls_1) {
function fileFrom(file, contents, path = file.path) { function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({ return new File({
contents: Buffer.from(contents), contents: Buffer.from(contents),
base: file.base, base: file.base,
@@ -87,27 +87,29 @@ function isImportNode(node) {
} }
nls_1.fileFrom = fileFrom; nls_1.fileFrom = fileFrom;
function mappedPositionFrom(source, lc) { function mappedPositionFrom(source, lc) {
return { source, line: lc.line + 1, column: lc.character }; return { source: source, line: lc.line + 1, column: lc.character };
} }
nls_1.mappedPositionFrom = mappedPositionFrom; nls_1.mappedPositionFrom = mappedPositionFrom;
function lcFrom(position) { function lcFrom(position) {
return { line: position.line - 1, character: position.column }; return { line: position.line - 1, character: position.column };
} }
nls_1.lcFrom = lcFrom; nls_1.lcFrom = lcFrom;
class SingleFileServiceHost { var SingleFileServiceHost = /** @class */ (function () {
constructor(options, filename, contents) { function SingleFileServiceHost(options, filename, contents) {
var _this = this;
this.options = options; this.options = options;
this.filename = filename; this.filename = filename;
this.getCompilationSettings = () => this.options; this.getCompilationSettings = function () { return _this.options; };
this.getScriptFileNames = () => [this.filename]; this.getScriptFileNames = function () { return [_this.filename]; };
this.getScriptVersion = () => '1'; this.getScriptVersion = function () { return '1'; };
this.getScriptSnapshot = (name) => name === this.filename ? this.file : this.lib; this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
this.getCurrentDirectory = () => ''; this.getCurrentDirectory = function () { return ''; };
this.getDefaultLibFileName = () => 'lib.d.ts'; this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
this.file = ts.ScriptSnapshot.fromString(contents); this.file = ts.ScriptSnapshot.fromString(contents);
this.lib = ts.ScriptSnapshot.fromString(''); this.lib = ts.ScriptSnapshot.fromString('');
} }
} return SingleFileServiceHost;
}());
nls_1.SingleFileServiceHost = SingleFileServiceHost; nls_1.SingleFileServiceHost = SingleFileServiceHost;
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) { function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) { if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
@@ -115,96 +117,97 @@ function isImportNode(node) {
} }
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
} }
function analyze(contents, options = {}) { function analyze(contents, options) {
const filename = 'file.ts'; if (options === void 0) { options = {}; }
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents); var filename = 'file.ts';
const service = ts.createLanguageService(serviceHost); var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true); var service = ts.createLanguageService(serviceHost);
var sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
// all imports // all imports
const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse)); var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
// import nls = require('vs/nls'); // import nls = require('vs/nls');
const importEqualsDeclarations = imports var importEqualsDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration) .filter(function (n) { return n.kind === ts.SyntaxKind.ImportEqualsDeclaration; })
.map(n => n) .map(function (n) { return n; })
.filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) .filter(function (d) { return d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference; })
.filter(d => d.moduleReference.expression.getText() === '\'vs/nls\''); .filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
// import ... from 'vs/nls'; // import ... from 'vs/nls';
const importDeclarations = imports var importDeclarations = imports
.filter(n => n.kind === ts.SyntaxKind.ImportDeclaration) .filter(function (n) { return n.kind === ts.SyntaxKind.ImportDeclaration; })
.map(n => n) .map(function (n) { return n; })
.filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral) .filter(function (d) { return d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral; })
.filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'') .filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
.filter(d => !!d.importClause && !!d.importClause.namedBindings); .filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
const nlsExpressions = importEqualsDeclarations var nlsExpressions = importEqualsDeclarations
.map(d => d.moduleReference.expression) .map(function (d) { return d.moduleReference.expression; })
.concat(importDeclarations.map(d => d.moduleSpecifier)) .concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
.map(d => ({ .map(function (d) { return ({
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()), start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd()) end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
})); }); });
// `nls.localize(...)` calls // `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations var nlsLocalizeCallExpressions = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)) .filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
.map(d => d.importClause.namedBindings.name) .map(function (d) { return d.importClause.namedBindings.name; })
.concat(importEqualsDeclarations.map(d => d.name)) .concat(importEqualsDeclarations.map(function (d) { return d.name; }))
// find read-only references to `nls` // find read-only references to `nls`
.map(n => service.getReferencesAtPosition(filename, n.pos + 1)) .map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten() .flatten()
.filter(r => !r.isWriteAccess) .filter(function (r) { return !r.isWriteAccess; })
// find the deepest call expressions AST nodes that contain those references // find the deepest call expressions AST nodes that contain those references
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n))) .map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(a => lazy(a).last()) .map(function (a) { return lazy(a).last(); })
.filter(n => !!n) .filter(function (n) { return !!n; })
.map(n => n) .map(function (n) { return n; })
// only `localize` calls // only `localize` calls
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'); .filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
// `localize` named imports // `localize` named imports
const allLocalizeImportDeclarations = importDeclarations var allLocalizeImportDeclarations = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)) .filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports; })
.map(d => [].concat(d.importClause.namedBindings.elements)) .map(function (d) { return [].concat(d.importClause.namedBindings.elements); })
.flatten(); .flatten();
// `localize` read-only references // `localize` read-only references
const localizeReferences = allLocalizeImportDeclarations var localizeReferences = allLocalizeImportDeclarations
.filter(d => d.name.getText() === 'localize') .filter(function (d) { return d.name.getText() === 'localize'; })
.map(n => service.getReferencesAtPosition(filename, n.pos + 1)) .map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
.flatten() .flatten()
.filter(r => !r.isWriteAccess); .filter(function (r) { return !r.isWriteAccess; });
// custom named `localize` read-only references // custom named `localize` read-only references
const namedLocalizeReferences = allLocalizeImportDeclarations var namedLocalizeReferences = allLocalizeImportDeclarations
.filter(d => d.propertyName && d.propertyName.getText() === 'localize') .filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
.map(n => service.getReferencesAtPosition(filename, n.name.pos + 1)) .map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
.flatten() .flatten()
.filter(r => !r.isWriteAccess); .filter(function (r) { return !r.isWriteAccess; });
// find the deepest call expressions AST nodes that contain those references // find the deepest call expressions AST nodes that contain those references
const localizeCallExpressions = localizeReferences var localizeCallExpressions = localizeReferences
.concat(namedLocalizeReferences) .concat(namedLocalizeReferences)
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n))) .map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
.map(a => lazy(a).last()) .map(function (a) { return lazy(a).last(); })
.filter(n => !!n) .filter(function (n) { return !!n; })
.map(n => n); .map(function (n) { return n; });
// collect everything // collect everything
const localizeCalls = nlsLocalizeCallExpressions var localizeCalls = nlsLocalizeCallExpressions
.concat(localizeCallExpressions) .concat(localizeCallExpressions)
.map(e => e.arguments) .map(function (e) { return e.arguments; })
.filter(a => a.length > 1) .filter(function (a) { return a.length > 1; })
.sort((a, b) => a[0].getStart() - b[0].getStart()) .sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
.map(a => ({ .map(function (a) { return ({
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) }, keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
key: a[0].getText(), key: a[0].getText(),
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) }, valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
value: a[1].getText() value: a[1].getText()
})); }); });
return { return {
localizeCalls: localizeCalls.toArray(), localizeCalls: localizeCalls.toArray(),
nlsExpressions: nlsExpressions.toArray() nlsExpressions: nlsExpressions.toArray()
}; };
} }
nls_1.analyze = analyze; nls_1.analyze = analyze;
class TextModel { var TextModel = /** @class */ (function () {
constructor(contents) { function TextModel(contents) {
const regex = /\r\n|\r|\n/g; var regex = /\r\n|\r|\n/g;
let index = 0; var index = 0;
let match; var match;
this.lines = []; this.lines = [];
this.lineEndings = []; this.lineEndings = [];
while (match = regex.exec(contents)) { while (match = regex.exec(contents)) {
@@ -217,80 +220,85 @@ function isImportNode(node) {
this.lineEndings.push(''); this.lineEndings.push('');
} }
} }
get(index) { TextModel.prototype.get = function (index) {
return this.lines[index]; return this.lines[index];
} };
set(index, line) { TextModel.prototype.set = function (index, line) {
this.lines[index] = line; this.lines[index] = line;
} };
get lineCount() { Object.defineProperty(TextModel.prototype, "lineCount", {
return this.lines.length; get: function () {
} return this.lines.length;
},
enumerable: true,
configurable: true
});
/** /**
* Applies patch(es) to the model. * Applies patch(es) to the model.
* Multiple patches must be ordered. * Multiple patches must be ordered.
* Does not support patches spanning multiple lines. * Does not support patches spanning multiple lines.
*/ */
apply(patch) { TextModel.prototype.apply = function (patch) {
const startLineNumber = patch.span.start.line; var startLineNumber = patch.span.start.line;
const endLineNumber = patch.span.end.line; var endLineNumber = patch.span.end.line;
const startLine = this.lines[startLineNumber] || ''; var startLine = this.lines[startLineNumber] || '';
const endLine = this.lines[endLineNumber] || ''; var endLine = this.lines[endLineNumber] || '';
this.lines[startLineNumber] = [ this.lines[startLineNumber] = [
startLine.substring(0, patch.span.start.character), startLine.substring(0, patch.span.start.character),
patch.content, patch.content,
endLine.substring(patch.span.end.character) endLine.substring(patch.span.end.character)
].join(''); ].join('');
for (let i = startLineNumber + 1; i <= endLineNumber; i++) { for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
this.lines[i] = ''; this.lines[i] = '';
} }
} };
toString() { TextModel.prototype.toString = function () {
return lazy(this.lines).zip(this.lineEndings) return lazy(this.lines).zip(this.lineEndings)
.flatten().toArray().join(''); .flatten().toArray().join('');
} };
} return TextModel;
}());
nls_1.TextModel = TextModel; nls_1.TextModel = TextModel;
function patchJavascript(patches, contents, moduleId) { function patchJavascript(patches, contents, moduleId) {
const model = new nls.TextModel(contents); var model = new nls.TextModel(contents);
// patch the localize calls // patch the localize calls
lazy(patches).reverse().each(p => model.apply(p)); lazy(patches).reverse().each(function (p) { return model.apply(p); });
// patch the 'vs/nls' imports // patch the 'vs/nls' imports
const firstLine = model.get(0); var firstLine = model.get(0);
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`); var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
model.set(0, patchedFirstLine); model.set(0, patchedFirstLine);
return model.toString(); return model.toString();
} }
nls_1.patchJavascript = patchJavascript; nls_1.patchJavascript = patchJavascript;
function patchSourcemap(patches, rsm, smc) { function patchSourcemap(patches, rsm, smc) {
const smg = new sm.SourceMapGenerator({ var smg = new sm.SourceMapGenerator({
file: rsm.file, file: rsm.file,
sourceRoot: rsm.sourceRoot sourceRoot: rsm.sourceRoot
}); });
patches = patches.reverse(); patches = patches.reverse();
let currentLine = -1; var currentLine = -1;
let currentLineDiff = 0; var currentLineDiff = 0;
let source = null; var source = null;
smc.eachMapping(m => { smc.eachMapping(function (m) {
const patch = patches[patches.length - 1]; var patch = patches[patches.length - 1];
const original = { line: m.originalLine, column: m.originalColumn }; var original = { line: m.originalLine, column: m.originalColumn };
const generated = { line: m.generatedLine, column: m.generatedColumn }; var generated = { line: m.generatedLine, column: m.generatedColumn };
if (currentLine !== generated.line) { if (currentLine !== generated.line) {
currentLineDiff = 0; currentLineDiff = 0;
} }
currentLine = generated.line; currentLine = generated.line;
generated.column += currentLineDiff; generated.column += currentLineDiff;
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) { if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
const originalLength = patch.span.end.character - patch.span.start.character; var originalLength = patch.span.end.character - patch.span.start.character;
const modifiedLength = patch.content.length; var modifiedLength = patch.content.length;
const lengthDiff = modifiedLength - originalLength; var lengthDiff = modifiedLength - originalLength;
currentLineDiff += lengthDiff; currentLineDiff += lengthDiff;
generated.column += lengthDiff; generated.column += lengthDiff;
patches.pop(); patches.pop();
} }
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source; source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
source = source.replace(/\\/g, '/'); source = source.replace(/\\/g, '/');
smg.addMapping({ source, name: m.name, original, generated }); smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
}, null, sm.SourceMapConsumer.GENERATED_ORDER); }, null, sm.SourceMapConsumer.GENERATED_ORDER);
if (source) { if (source) {
smg.setSourceContent(source, smc.sourceContentFor(source)); smg.setSourceContent(source, smc.sourceContentFor(source));
@@ -299,47 +307,47 @@ function isImportNode(node) {
} }
nls_1.patchSourcemap = patchSourcemap; nls_1.patchSourcemap = patchSourcemap;
function patch(moduleId, typescript, javascript, sourcemap) { function patch(moduleId, typescript, javascript, sourcemap) {
const { localizeCalls, nlsExpressions } = analyze(typescript); var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
if (localizeCalls.length === 0) { if (localizeCalls.length === 0) {
return { javascript, sourcemap }; return { javascript: javascript, sourcemap: sourcemap };
} }
const nlsKeys = template(localizeCalls.map(lc => lc.key)); var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
const nls = template(localizeCalls.map(lc => lc.value)); var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
const smc = new sm.SourceMapConsumer(sourcemap); var smc = new sm.SourceMapConsumer(sourcemap);
const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]); var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
let i = 0; var i = 0;
// build patches // build patches
const patches = lazy(localizeCalls) var patches = lazy(localizeCalls)
.map(lc => ([ .map(function (lc) { return ([
{ range: lc.keySpan, content: '' + (i++) }, { range: lc.keySpan, content: '' + (i++) },
{ range: lc.valueSpan, content: 'null' } { range: lc.valueSpan, content: 'null' }
])) ]); })
.flatten() .flatten()
.map(c => { .map(function (c) {
const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start))); var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end))); var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
return { span: { start, end }, content: c.content }; return { span: { start: start, end: end }, content: c.content };
}) })
.toArray(); .toArray();
javascript = patchJavascript(patches, javascript, moduleId); javascript = patchJavascript(patches, javascript, moduleId);
// since imports are not within the sourcemap information, // since imports are not within the sourcemap information,
// we must do this MacGyver style // we must do this MacGyver style
if (nlsExpressions.length) { if (nlsExpressions.length) {
javascript = javascript.replace(/^define\(.*$/m, line => { javascript = javascript.replace(/^define\(.*$/m, function (line) {
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`); return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
}); });
} }
sourcemap = patchSourcemap(patches, sourcemap, smc); sourcemap = patchSourcemap(patches, sourcemap, smc);
return { javascript, sourcemap, nlsKeys, nls }; return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
} }
nls_1.patch = patch; nls_1.patch = patch;
function patchFiles(javascriptFile, typescript) { function patchFiles(javascriptFile, typescript) {
// hack? // hack?
const moduleId = javascriptFile.relative var moduleId = javascriptFile.relative
.replace(/\.js$/, '') .replace(/\.js$/, '')
.replace(/\\/g, '/'); .replace(/\\/g, '/');
const { javascript, sourcemap, nlsKeys, nls } = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap); var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
const result = [fileFrom(javascriptFile, javascript)]; var result = [fileFrom(javascriptFile, javascript)];
result[0].sourceMap = sourcemap; result[0].sourceMap = sourcemap;
if (nlsKeys) { if (nlsKeys) {
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js'))); result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));

View File

@@ -6,9 +6,10 @@
import * as ts from 'typescript'; import * as ts from 'typescript';
import * as lazy from 'lazy.js'; import * as lazy from 'lazy.js';
import { duplex, through } from 'event-stream'; import { duplex, through } from 'event-stream';
import * as File from 'vinyl'; import File = require('vinyl');
import * as sm from 'source-map'; import * as sm from 'source-map';
import * as path from 'path'; import assign = require('object-assign');
import path = require('path');
declare class FileSourceMap extends File { declare class FileSourceMap extends File {
public sourceMap: sm.RawSourceMap; public sourceMap: sm.RawSourceMap;
@@ -25,7 +26,7 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
const result: ts.Node[] = []; const result: ts.Node[] = [];
function loop(node: ts.Node) { function loop(node: ts.Node) {
const stepResult = fn(node); var stepResult = fn(node);
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) { if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
result.push(node); result.push(node);
@@ -41,8 +42,8 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
} }
function clone<T>(object: T): T { function clone<T>(object: T): T {
const result = <T>{}; var result = <T>{};
for (const id in object) { for (var id in object) {
result[id] = object[id]; result[id] = object[id];
} }
return result; return result;
@@ -66,8 +67,8 @@ define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
* Returns a stream containing the patched JavaScript and source maps. * Returns a stream containing the patched JavaScript and source maps.
*/ */
function nls(): NodeJS.ReadWriteStream { function nls(): NodeJS.ReadWriteStream {
const input = through(); var input = through();
const output = input.pipe(through(function (f: FileSourceMap) { var output = input.pipe(through(function (f: FileSourceMap) {
if (!f.sourceMap) { if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`)); return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
} }
@@ -82,7 +83,7 @@ function nls(): NodeJS.ReadWriteStream {
source = path.join(root, source); source = path.join(root, source);
} }
const typescript = f.sourceMap.sourcesContent![0]; const typescript = f.sourceMap.sourcesContent[0];
if (!typescript) { if (!typescript) {
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`)); return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
} }
@@ -173,7 +174,7 @@ module nls {
export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult { export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
const filename = 'file.ts'; const filename = 'file.ts';
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents); const serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
const service = ts.createLanguageService(serviceHost); const service = ts.createLanguageService(serviceHost);
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true); const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
@@ -205,8 +206,8 @@ module nls {
// `nls.localize(...)` calls // `nls.localize(...)` calls
const nlsLocalizeCallExpressions = importDeclarations const nlsLocalizeCallExpressions = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)) .filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)
.map(d => (<ts.NamespaceImport>d.importClause!.namedBindings).name) .map(d => (<ts.NamespaceImport>d.importClause.namedBindings).name)
.concat(importEqualsDeclarations.map(d => d.name)) .concat(importEqualsDeclarations.map(d => d.name))
// find read-only references to `nls` // find read-only references to `nls`
@@ -225,8 +226,8 @@ module nls {
// `localize` named imports // `localize` named imports
const allLocalizeImportDeclarations = importDeclarations const allLocalizeImportDeclarations = importDeclarations
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)) .filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)
.map(d => ([] as any[]).concat((<ts.NamedImports>d.importClause!.namedBindings!).elements)) .map(d => [].concat((<ts.NamedImports>d.importClause.namedBindings).elements))
.flatten(); .flatten();
// `localize` read-only references // `localize` read-only references
@@ -278,7 +279,7 @@ module nls {
constructor(contents: string) { constructor(contents: string) {
const regex = /\r\n|\r|\n/g; const regex = /\r\n|\r|\n/g;
let index = 0; let index = 0;
let match: RegExpExecArray | null; let match: RegExpExecArray;
this.lines = []; this.lines = [];
this.lineEndings = []; this.lineEndings = [];
@@ -359,7 +360,7 @@ module nls {
patches = patches.reverse(); patches = patches.reverse();
let currentLine = -1; let currentLine = -1;
let currentLineDiff = 0; let currentLineDiff = 0;
let source: string | null = null; let source = null;
smc.eachMapping(m => { smc.eachMapping(m => {
const patch = patches[patches.length - 1]; const patch = patches[patches.length - 1];

View File

@@ -4,31 +4,29 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var path = require("path");
const gulp = require("gulp"); var gulp = require("gulp");
const concat = require("gulp-concat"); var sourcemaps = require("gulp-sourcemaps");
const minifyCSS = require("gulp-cssnano"); var filter = require("gulp-filter");
const filter = require("gulp-filter"); var minifyCSS = require("gulp-cssnano");
const flatmap = require("gulp-flatmap"); var uglify = require("gulp-uglify");
const sourcemaps = require("gulp-sourcemaps"); var composer = require("gulp-uglify/composer");
const uglify = require("gulp-uglify"); var uglifyes = require("uglify-es");
const composer = require("gulp-uglify/composer"); var es = require("event-stream");
const gulpUtil = require("gulp-util"); var concat = require("gulp-concat");
const path = require("path"); var VinylFile = require("vinyl");
const pump = require("pump"); var bundle = require("./bundle");
const uglifyes = require("uglify-es"); var util = require("./util");
const VinylFile = require("vinyl"); var gulpUtil = require("gulp-util");
const bundle = require("./bundle"); var flatmap = require("gulp-flatmap");
const i18n_1 = require("./i18n"); var pump = require("pump");
const stats_1 = require("./stats"); var REPO_ROOT_PATH = path.join(__dirname, '../..');
const util = require("./util");
const REPO_ROOT_PATH = path.join(__dirname, '../..');
function log(prefix, message) { function log(prefix, message) {
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message); gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
} }
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
function loaderConfig(emptyPaths) { function loaderConfig(emptyPaths) {
const result = { var result = {
paths: { paths: {
'vs': 'out-build/vs', 'vs': 'out-build/vs',
'sql': 'out-build/sql', 'sql': 'out-build/sql',
@@ -40,26 +38,26 @@ function loaderConfig(emptyPaths) {
return result; return result;
} }
exports.loaderConfig = loaderConfig; exports.loaderConfig = loaderConfig;
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i; var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
function loader(src, bundledFileHeader, bundleLoader) { function loader(src, bundledFileHeader, bundleLoader) {
let sources = [ var sources = [
`${src}/vs/loader.js` src + "/vs/loader.js"
]; ];
if (bundleLoader) { if (bundleLoader) {
sources = sources.concat([ sources = sources.concat([
`${src}/vs/css.js`, src + "/vs/css.js",
`${src}/vs/nls.js` src + "/vs/nls.js"
]); ]);
} }
let isFirst = true; var isFirst = true;
return (gulp return (gulp
.src(sources, { base: `${src}` }) .src(sources, { base: "" + src })
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
if (isFirst) { if (isFirst) {
isFirst = false; isFirst = false;
this.emit('data', new VinylFile({ this.emit('data', new VinylFile({
path: 'fake', path: 'fake',
base: undefined, base: '',
contents: Buffer.from(bundledFileHeader) contents: Buffer.from(bundledFileHeader)
})); }));
this.emit('data', data); this.emit('data', data);
@@ -76,12 +74,12 @@ function loader(src, bundledFileHeader, bundleLoader) {
}))); })));
} }
function toConcatStream(src, bundledFileHeader, sources, dest) { function toConcatStream(src, bundledFileHeader, sources, dest) {
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest); var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
// If a bundle ends up including in any of the sources our copyright, then // If a bundle ends up including in any of the sources our copyright, then
// insert a fake source at the beginning of each bundle with our copyright // insert a fake source at the beginning of each bundle with our copyright
let containsOurCopyright = false; var containsOurCopyright = false;
for (let i = 0, len = sources.length; i < len; i++) { for (var i = 0, len = sources.length; i < len; i++) {
const fileContents = sources[i].contents; var fileContents = sources[i].contents;
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) { if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
containsOurCopyright = true; containsOurCopyright = true;
break; break;
@@ -93,9 +91,9 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
contents: bundledFileHeader contents: bundledFileHeader
}); });
} }
const treatedSources = sources.map(function (source) { var treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : ''; var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : undefined; var base = source.path ? root + ("/" + src) : '';
return new VinylFile({ return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake', path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
base: base, base: base,
@@ -104,8 +102,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
}); });
return es.readArray(treatedSources) return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through()) .pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest)) .pipe(concat(dest));
.pipe(stats_1.createStatsStream(dest));
} }
function toBundleStream(src, bundledFileHeader, bundles) { function toBundleStream(src, bundledFileHeader, bundles) {
return es.merge(bundles.map(function (bundle) { return es.merge(bundles.map(function (bundle) {
@@ -113,33 +110,33 @@ function toBundleStream(src, bundledFileHeader, bundles) {
})); }));
} }
function optimizeTask(opts) { function optimizeTask(opts) {
const src = opts.src; var src = opts.src;
const entryPoints = opts.entryPoints; var entryPoints = opts.entryPoints;
const otherSources = opts.otherSources; var otherSources = opts.otherSources;
const resources = opts.resources; var resources = opts.resources;
const loaderConfig = opts.loaderConfig; var loaderConfig = opts.loaderConfig;
const bundledFileHeader = opts.header; var bundledFileHeader = opts.header;
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader); var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
const out = opts.out; var out = opts.out;
return function () { return function () {
const bundlesStream = es.through(); // this stream will contain the bundled files var bundlesStream = es.through(); // this stream will contain the bundled files
const resourcesStream = es.through(); // this stream will contain the resources var resourcesStream = es.through(); // this stream will contain the resources
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) { bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { if (err) {
return bundlesStream.emit('error', JSON.stringify(err)); return bundlesStream.emit('error', JSON.stringify(err));
} }
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream); toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
// Remove css inlined resources // Remove css inlined resources
const filteredResources = resources.slice(); var filteredResources = resources.slice();
result.cssInlinedResources.forEach(function (resource) { result.cssInlinedResources.forEach(function (resource) {
if (process.env['VSCODE_BUILD_VERBOSE']) { if (process.env['VSCODE_BUILD_VERBOSE']) {
log('optimizer', 'excluding inlined: ' + resource); log('optimizer', 'excluding inlined: ' + resource);
} }
filteredResources.push('!' + resource); filteredResources.push('!' + resource);
}); });
gulp.src(filteredResources, { base: `${src}` }).pipe(resourcesStream); gulp.src(filteredResources, { base: "" + src }).pipe(resourcesStream);
const bundleInfoArray = []; var bundleInfoArray = [];
if (opts.bundleInfo) { if (opts.bundleInfo) {
bundleInfoArray.push(new VinylFile({ bundleInfoArray.push(new VinylFile({
path: 'bundleInfo.json', path: 'bundleInfo.json',
@@ -149,9 +146,9 @@ function optimizeTask(opts) {
} }
es.readArray(bundleInfoArray).pipe(bundleInfoStream); es.readArray(bundleInfoArray).pipe(bundleInfoStream);
}); });
const otherSourcesStream = es.through(); var otherSourcesStream = es.through();
const otherSourcesStreamArr = []; var otherSourcesStreamArr = [];
gulp.src(otherSources, { base: `${src}` }) gulp.src(otherSources, { base: "" + src })
.pipe(es.through(function (data) { .pipe(es.through(function (data) {
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative)); otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
}, function () { }, function () {
@@ -162,17 +159,13 @@ function optimizeTask(opts) {
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream); es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
} }
})); }));
const result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream); var result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
return result return result
.pipe(sourcemaps.write('./', { .pipe(sourcemaps.write('./', {
sourceRoot: undefined, sourceRoot: null,
addComment: true, addComment: true,
includeContent: true includeContent: true
})) }))
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}) : es.through())
.pipe(gulp.dest(out)); .pipe(gulp.dest(out));
}; };
} }
@@ -182,14 +175,14 @@ exports.optimizeTask = optimizeTask;
* to have a file "context" to include our copyright only once per file. * to have a file "context" to include our copyright only once per file.
*/ */
function uglifyWithCopyrights() { function uglifyWithCopyrights() {
const preserveComments = (f) => { var preserveComments = function (f) {
return (_node, comment) => { return function (node, comment) {
const text = comment.value; var text = comment.value;
const type = comment.type; var type = comment.type;
if (/@minifier_do_not_preserve/.test(text)) { if (/@minifier_do_not_preserve/.test(text)) {
return false; return false;
} }
const isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text); var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
if (isOurCopyright) { if (isOurCopyright) {
if (f.__hasOurCopyright) { if (f.__hasOurCopyright) {
return false; return false;
@@ -207,10 +200,10 @@ function uglifyWithCopyrights() {
return false; return false;
}; };
}; };
const minify = composer(uglifyes); var minify = composer(uglifyes);
const input = es.through(); var input = es.through();
const output = input var output = input
.pipe(flatmap((stream, f) => { .pipe(flatmap(function (stream, f) {
return stream.pipe(minify({ return stream.pipe(minify({
output: { output: {
comments: preserveComments(f), comments: preserveComments(f),
@@ -221,18 +214,18 @@ function uglifyWithCopyrights() {
return es.duplex(input, output); return es.duplex(input, output);
} }
function minifyTask(src, sourceMapBaseUrl) { function minifyTask(src, sourceMapBaseUrl) {
const sourceMappingURL = sourceMapBaseUrl ? ((f) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined; var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
return cb => { return function (cb) {
const jsFilter = filter('**/*.js', { restore: true }); var jsFilter = filter('**/*.js', { restore: true });
const cssFilter = filter('**/*.css', { restore: true }); var cssFilter = filter('**/*.css', { restore: true });
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', { pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
sourceMappingURL, sourceMappingURL: sourceMappingURL,
sourceRoot: undefined, sourceRoot: null,
includeContent: true, includeContent: true,
addComment: true addComment: true
}), gulp.dest(src + '-min'), (err) => { }), gulp.dest(src + '-min'), function (err) {
if (err instanceof uglify.GulpUglifyError) { if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`); console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
} }
cb(err); cb(err);
}); });

View File

@@ -5,25 +5,24 @@
'use strict'; 'use strict';
import * as es from 'event-stream'; import * as path from 'path';
import * as gulp from 'gulp'; import * as gulp from 'gulp';
import * as concat from 'gulp-concat';
import * as minifyCSS from 'gulp-cssnano';
import * as filter from 'gulp-filter';
import * as flatmap from 'gulp-flatmap';
import * as sourcemaps from 'gulp-sourcemaps'; import * as sourcemaps from 'gulp-sourcemaps';
import * as filter from 'gulp-filter';
import * as minifyCSS from 'gulp-cssnano';
import * as uglify from 'gulp-uglify'; import * as uglify from 'gulp-uglify';
import * as composer from 'gulp-uglify/composer'; import * as composer from 'gulp-uglify/composer';
import * as gulpUtil from 'gulp-util';
import * as path from 'path';
import * as pump from 'pump';
import * as sm from 'source-map';
import * as uglifyes from 'uglify-es'; import * as uglifyes from 'uglify-es';
import * as es from 'event-stream';
import * as concat from 'gulp-concat';
import * as VinylFile from 'vinyl'; import * as VinylFile from 'vinyl';
import * as bundle from './bundle'; import * as bundle from './bundle';
import { Language, processNlsFiles } from './i18n';
import { createStatsStream } from './stats';
import * as util from './util'; import * as util from './util';
import * as gulpUtil from 'gulp-util';
import * as flatmap from 'gulp-flatmap';
import * as pump from 'pump';
import * as sm from 'source-map';
import { Language } from './i18n';
const REPO_ROOT_PATH = path.join(__dirname, '../..'); const REPO_ROOT_PATH = path.join(__dirname, '../..');
@@ -33,7 +32,7 @@ function log(prefix: string, message: string): void {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
export function loaderConfig(emptyPaths?: string[]) { export function loaderConfig(emptyPaths?: string[]) {
const result: any = { const result = {
paths: { paths: {
'vs': 'out-build/vs', 'vs': 'out-build/vs',
'sql': 'out-build/sql', 'sql': 'out-build/sql',
@@ -73,7 +72,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
isFirst = false; isFirst = false;
this.emit('data', new VinylFile({ this.emit('data', new VinylFile({
path: 'fake', path: 'fake',
base: undefined, base: '',
contents: Buffer.from(bundledFileHeader) contents: Buffer.from(bundledFileHeader)
})); }));
this.emit('data', data); this.emit('data', data);
@@ -113,7 +112,7 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
const treatedSources = sources.map(function (source) { const treatedSources = sources.map(function (source) {
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : ''; const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
const base = source.path ? root + `/${src}` : undefined; const base = source.path ? root + `/${src}` : '';
return new VinylFile({ return new VinylFile({
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake', path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
@@ -124,11 +123,10 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
return es.readArray(treatedSources) return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through()) .pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest)) .pipe(concat(dest));
.pipe(createStatsStream(dest));
} }
function toBundleStream(src: string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream { function toBundleStream(src:string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
return es.merge(bundles.map(function (bundle) { return es.merge(bundles.map(function (bundle) {
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest); return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
})); }));
@@ -190,7 +188,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
bundle.bundle(entryPoints, loaderConfig, function (err, result) { bundle.bundle(entryPoints, loaderConfig, function (err, result) {
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); } if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream); toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
@@ -239,14 +237,10 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
return result return result
.pipe(sourcemaps.write('./', { .pipe(sourcemaps.write('./', {
sourceRoot: undefined, sourceRoot: null,
addComment: true, addComment: true,
includeContent: true includeContent: true
})) }))
.pipe(opts.languages && opts.languages.length ? processNlsFiles({
fileHeader: bundledFileHeader,
languages: opts.languages
}) : es.through())
.pipe(gulp.dest(out)); .pipe(gulp.dest(out));
}; };
} }
@@ -260,7 +254,7 @@ declare class FileWithCopyright extends VinylFile {
*/ */
function uglifyWithCopyrights(): NodeJS.ReadWriteStream { function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
const preserveComments = (f: FileWithCopyright) => { const preserveComments = (f: FileWithCopyright) => {
return (_node: any, comment: { value: string; type: string; }) => { return (node, comment: { value: string; type: string; }) => {
const text = comment.value; const text = comment.value;
const type = comment.type; const type = comment.type;
@@ -288,7 +282,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
}; };
}; };
const minify = (composer as any)(uglifyes); const minify = composer(uglifyes);
const input = es.through(); const input = es.through();
const output = input const output = input
.pipe(flatmap((stream, f) => { .pipe(flatmap((stream, f) => {
@@ -304,7 +298,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
} }
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void { export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
const sourceMappingURL = sourceMapBaseUrl ? ((f: any) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined; const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
return cb => { return cb => {
const jsFilter = filter('**/*.js', { restore: true }); const jsFilter = filter('**/*.js', { restore: true });
@@ -321,13 +315,13 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
cssFilter.restore, cssFilter.restore,
sourcemaps.write('./', { sourcemaps.write('./', {
sourceMappingURL, sourceMappingURL,
sourceRoot: undefined, sourceRoot: null,
includeContent: true, includeContent: true,
addComment: true addComment: true
} as any), }),
gulp.dest(src + '-min') gulp.dest(src + '-min')
, (err: any) => { , (err: any) => {
if (err instanceof (uglify as any).GulpUglifyError) { if (err instanceof uglify.GulpUglifyError) {
console.error(`Uglify error in '${err.cause && err.cause.filename}'`); console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
} }
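uglifyWithCopyrights above drives uglify-es through gulp-uglify's composer and keeps copyright banners by answering true from a comments predicate. A minimal sketch of that pattern under the same package assumptions; the task name, glob and regular expression here are illustrative, not the build's own rules:

	import * as gulp from 'gulp';
	import * as composer from 'gulp-uglify/composer';
	import * as uglifyes from 'uglify-es';

	const minify = (composer as any)(uglifyes, console);

	gulp.task('minify-keep-banners', () => {
		return gulp.src('out/**/*.js')
			.pipe(minify({
				output: {
					// uglify calls this for every comment; returning true keeps it
					comments: (_node: any, comment: { value: string }) => {
						return /copyright|license|@preserve/i.test(comment.value);
					}
				}
			}))
			.pipe(gulp.dest('out-min'));
	});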


@@ -4,20 +4,20 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream"); var es = require("event-stream");
const _ = require("underscore"); var _ = require("underscore");
const util = require("gulp-util"); var util = require("gulp-util");
const fs = require("fs"); var fs = require("fs");
const path = require("path"); var path = require("path");
const allErrors = []; var allErrors = [];
let startTime = null; var startTime = null;
let count = 0; var count = 0;
function onStart() { function onStart() {
if (count++ > 0) { if (count++ > 0) {
return; return;
} }
startTime = new Date().getTime(); startTime = new Date().getTime();
util.log(`Starting ${util.colors.green('compilation')}...`); util.log("Starting " + util.colors.green('compilation') + "...");
} }
function onEnd() { function onEnd() {
if (--count > 0) { if (--count > 0) {
@@ -25,7 +25,7 @@ function onEnd() {
} }
log(); log();
} }
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log'); var buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
try { try {
fs.mkdirSync(path.dirname(buildLogPath)); fs.mkdirSync(path.dirname(buildLogPath));
} }
@@ -33,52 +33,61 @@ catch (err) {
// ignore // ignore
} }
function log() { function log() {
const errors = _.flatten(allErrors); var errors = _.flatten(allErrors);
const seen = new Set(); var seen = new Set();
errors.map(err => { errors.map(function (err) {
if (!seen.has(err)) { if (!seen.has(err)) {
seen.add(err); seen.add(err);
util.log(`${util.colors.red('Error')}: ${err}`); util.log(util.colors.red('Error') + ": " + err);
} }
}); });
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/; var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
const messages = errors var messages = errors
.map(err => regex.exec(err)) .map(function (err) { return regex.exec(err); })
.filter(match => !!match) .filter(function (match) { return !!match; })
.map(x => x) .map(function (_a) {
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message })); var path = _a[1], line = _a[2], column = _a[3], message = _a[4];
return ({ path: path, line: parseInt(line), column: parseInt(column), message: message });
});
try { try {
fs.writeFileSync(buildLogPath, JSON.stringify(messages)); fs.writeFileSync(buildLogPath, JSON.stringify(messages));
} }
catch (err) { catch (err) {
//noop //noop
} }
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`); util.log("Finished " + util.colors.green('compilation') + " with " + errors.length + " errors after " + util.colors.magenta((new Date().getTime() - startTime) + ' ms'));
} }
function createReporter() { function createReporter() {
const errors = []; var errors = [];
allErrors.push(errors); allErrors.push(errors);
const result = (err) => errors.push(err); var ReportFunc = /** @class */ (function () {
result.hasErrors = () => errors.length > 0; function ReportFunc(err) {
result.end = (emitError) => { errors.push(err);
errors.length = 0; }
onStart(); ReportFunc.hasErrors = function () {
return es.through(undefined, function () { return errors.length > 0;
onEnd(); };
if (emitError && errors.length > 0) { ReportFunc.end = function (emitError) {
if (!errors.__logged__) { errors.length = 0;
log(); onStart();
return es.through(null, function () {
onEnd();
if (emitError && errors.length > 0) {
errors.__logged__ = true;
if (!errors.__logged__) {
log();
}
var err = new Error("Found " + errors.length + " errors");
err.__reporter__ = true;
this.emit('error', err);
} }
errors.__logged__ = true; else {
const err = new Error(`Found ${errors.length} errors`); this.emit('end');
err.__reporter__ = true; }
this.emit('error', err); });
} };
else { return ReportFunc;
this.emit('end'); }());
} return ReportFunc;
});
};
return result;
} }
exports.createReporter = createReporter; exports.createReporter = createReporter;


@@ -12,7 +12,7 @@ import * as fs from 'fs';
import * as path from 'path'; import * as path from 'path';
const allErrors: string[][] = []; const allErrors: string[][] = [];
let startTime: number | null = null; let startTime: number = null;
let count = 0; let count = 0;
function onStart(): void { function onStart(): void {
@@ -55,7 +55,6 @@ function log(): void {
const messages = errors const messages = errors
.map(err => regex.exec(err)) .map(err => regex.exec(err))
.filter(match => !!match) .filter(match => !!match)
.map(x => x as string[])
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message })); .map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
try { try {
@@ -65,7 +64,7 @@ function log(): void {
//noop //noop
} }
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime!) + ' ms')}`); util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`);
} }
export interface IReporter { export interface IReporter {
@@ -78,32 +77,38 @@ export function createReporter(): IReporter {
const errors: string[] = []; const errors: string[] = [];
allErrors.push(errors); allErrors.push(errors);
const result = (err: string) => errors.push(err); class ReportFunc {
constructor(err: string) {
errors.push(err);
}
result.hasErrors = () => errors.length > 0; static hasErrors(): boolean {
return errors.length > 0;
}
result.end = (emitError: boolean): NodeJS.ReadWriteStream => { static end(emitError: boolean): NodeJS.ReadWriteStream {
errors.length = 0; errors.length = 0;
onStart(); onStart();
return es.through(undefined, function () { return es.through(null, function () {
onEnd(); onEnd();
if (emitError && errors.length > 0) { if (emitError && errors.length > 0) {
if (!(errors as any).__logged__) { (errors as any).__logged__ = true;
log();
if (!(errors as any).__logged__) {
log();
}
const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
} else {
this.emit('end');
} }
});
}
}
(errors as any).__logged__ = true; return <IReporter><any>ReportFunc;
const err = new Error(`Found ${errors.length} errors`);
(err as any).__reporter__ = true;
this.emit('error', err);
} else {
this.emit('end');
}
});
};
return result;
} }
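The rewritten createReporter replaces the old ReportFunc class with a plainer idiom: the reporter is itself the error-collecting function, and hasErrors/end are attached to it as properties (TypeScript accepts such property assignments on locally declared functions). A stripped-down sketch of that shape, with the gulp stream plumbing left out and a made-up diagnostic string:

	function createReporterSketch() {
		const errors: string[] = [];
		const report = (err: string) => { errors.push(err); };
		report.hasErrors = () => errors.length > 0;
		report.reset = () => { errors.length = 0; };
		return report;
	}

	const reporter = createReporterSketch();
	reporter('src/vs/foo.ts(10,5): example error'); // hypothetical diagnostic
	console.log(reporter.hasErrors()); // true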


@@ -5,51 +5,35 @@
'use strict'; 'use strict';
var snaps; var snaps;
(function (snaps) { (function (snaps) {
const fs = require('fs'); var fs = require('fs');
const path = require('path'); var path = require('path');
const os = require('os'); var os = require('os');
const cp = require('child_process'); var cp = require('child_process');
const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`); var mksnapshot = path.join(__dirname, "../../node_modules/.bin/" + (process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'));
const product = require('../../product.json'); var product = require('../../product.json');
const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1]; var arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
// //
let loaderFilepath; var loaderFilepath;
let startupBlobFilepath; var startupBlobFilepath;
switch (process.platform) { switch (process.platform) {
case 'darwin': case 'darwin':
loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`; loaderFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Resources/app/out/vs/loader.js";
startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`; startupBlobFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin";
break; break;
case 'win32': case 'win32':
case 'linux': case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`; loaderFilepath = "VSCode-" + process.platform + "-" + arch + "/resources/app/out/vs/loader.js";
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`; startupBlobFilepath = "VSCode-" + process.platform + "-" + arch + "/snapshot_blob.bin";
break;
default:
throw new Error('Unknown platform');
} }
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath); loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath); startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
snapshotLoader(loaderFilepath, startupBlobFilepath); snapshotLoader(loaderFilepath, startupBlobFilepath);
function snapshotLoader(loaderFilepath, startupBlobFilepath) { function snapshotLoader(loaderFilepath, startupBlobFilepath) {
const inputFile = fs.readFileSync(loaderFilepath); var inputFile = fs.readFileSync(loaderFilepath);
const wrappedInputFile = ` var wrappedInputFile = "\n\t\tvar Monaco_Loader_Init;\n\t\t(function() {\n\t\t\tvar doNotInitLoader = true;\n\t\t\t" + inputFile.toString() + ";\n\t\t\tMonaco_Loader_Init = function() {\n\t\t\t\tAMDLoader.init();\n\t\t\t\tCSSLoaderPlugin.init();\n\t\t\t\tNLSLoaderPlugin.init();\n\n\t\t\t\treturn { define, require };\n\t\t\t}\n\t\t})();\n\t\t";
var Monaco_Loader_Init; var wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
(function() {
var doNotInitLoader = true;
${inputFile.toString()};
Monaco_Loader_Init = function() {
AMDLoader.init();
CSSLoaderPlugin.init();
NLSLoaderPlugin.init();
return { define, require };
}
})();
`;
const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
console.log(wrappedInputFilepath); console.log(wrappedInputFilepath);
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile); fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]); cp.execFileSync(mksnapshot, [wrappedInputFilepath, "--startup_blob", startupBlobFilepath]);
} }
})(snaps || (snaps = {})); })(snaps || (snaps = {}));


@@ -30,10 +30,6 @@ namespace snaps {
case 'linux': case 'linux':
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`; loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`; startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
break;
default:
throw new Error('Unknown platform');
} }
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath); loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);


@@ -4,13 +4,14 @@
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const fs = require("fs"); var fs = require("fs");
const path = require("path"); var path = require("path");
const tss = require("./treeshaking"); var tss = require("./treeshaking");
const REPO_ROOT = path.join(__dirname, '../../'); var REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src'); var SRC_DIR = path.join(REPO_ROOT, 'src');
let dirCache = {}; var OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
var dirCache = {};
function writeFile(filePath, contents) { function writeFile(filePath, contents) {
function ensureDirs(dirPath) { function ensureDirs(dirPath) {
if (dirCache[dirPath]) { if (dirCache[dirPath]) {
@@ -27,49 +28,32 @@ function writeFile(filePath, contents) {
fs.writeFileSync(filePath, contents); fs.writeFileSync(filePath, contents);
} }
function extractEditor(options) { function extractEditor(options) {
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString()); var result = tss.shake(options);
let compilerOptions; for (var fileName in result) {
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
}
else {
compilerOptions = tsConfig.compilerOptions;
}
tsConfig.compilerOptions = compilerOptions;
compilerOptions.noUnusedLocals = false;
compilerOptions.preserveConstEnums = false;
compilerOptions.declaration = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
delete compilerOptions.types;
delete tsConfig.extends;
tsConfig.exclude = [];
options.compilerOptions = compilerOptions;
let result = tss.shake(options);
for (let fileName in result) {
if (result.hasOwnProperty(fileName)) { if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]); writeFile(path.join(options.destRoot, fileName), result[fileName]);
} }
} }
let copied = {}; var copied = {};
const copyFile = (fileName) => { var copyFile = function (fileName) {
if (copied[fileName]) { if (copied[fileName]) {
return; return;
} }
copied[fileName] = true; copied[fileName] = true;
const srcPath = path.join(options.sourcesRoot, fileName); var srcPath = path.join(options.sourcesRoot, fileName);
const dstPath = path.join(options.destRoot, fileName); var dstPath = path.join(options.destRoot, fileName);
writeFile(dstPath, fs.readFileSync(srcPath)); writeFile(dstPath, fs.readFileSync(srcPath));
}; };
const writeOutputFile = (fileName, contents) => { var writeOutputFile = function (fileName, contents) {
writeFile(path.join(options.destRoot, fileName), contents); writeFile(path.join(options.destRoot, fileName), contents);
}; };
for (let fileName in result) { for (var fileName in result) {
if (result.hasOwnProperty(fileName)) { if (result.hasOwnProperty(fileName)) {
const fileContents = result[fileName]; var fileContents = result[fileName];
const info = ts.preProcessFile(fileContents); var info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) { for (var i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFileName = info.importedFiles[i].fileName; var importedFileName = info.importedFiles[i].fileName;
let importedFilePath; var importedFilePath = void 0;
if (/^vs\/css!/.test(importedFileName)) { if (/^vs\/css!/.test(importedFileName)) {
importedFilePath = importedFileName.substr('vs/css!'.length) + '.css'; importedFilePath = importedFileName.substr('vs/css!'.length) + '.css';
} }
@@ -90,187 +74,215 @@ function extractEditor(options) {
} }
} }
} }
delete tsConfig.compilerOptions.moduleResolution; var tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
tsConfig.compilerOptions.noUnusedLocals = false;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t')); writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[ [
'vs/css.build.js', 'vs/css.build.js',
'vs/css.d.ts', 'vs/css.d.ts',
'vs/css.js', 'vs/css.js',
'vs/loader.js', 'vs/loader.js',
'vs/monaco.d.ts',
'vs/nls.build.js', 'vs/nls.build.js',
'vs/nls.d.ts', 'vs/nls.d.ts',
'vs/nls.js', 'vs/nls.js',
'vs/nls.mock.ts', 'vs/nls.mock.ts',
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require.d.ts',
].forEach(copyFile); ].forEach(copyFile);
} }
exports.extractEditor = extractEditor; exports.extractEditor = extractEditor;
function createESMSourcesAndResources2(options) { function createESMSourcesAndResources(options) {
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder); var OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder); var OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder); var in_queue = Object.create(null);
const getDestAbsoluteFilePath = (file) => { var queue = [];
let dest = options.renames[file.replace(/\\/g, '/')] || file; var enqueue = function (module) {
if (dest === 'tsconfig.json') { if (in_queue[module]) {
return path.join(OUT_FOLDER, `tsconfig.json`); return;
} }
if (/\.ts$/.test(dest)) { in_queue[module] = true;
return path.join(OUT_FOLDER, dest); queue.push(module);
}
return path.join(OUT_RESOURCES_FOLDER, dest);
}; };
const allFiles = walkDirRecursive(SRC_FOLDER); var seenDir = {};
for (const file of allFiles) { var createDirectoryRecursive = function (dir) {
if (options.ignores.indexOf(file.replace(/\\/g, '/')) >= 0) { if (seenDir[dir]) {
continue; return;
} }
if (file === 'tsconfig.json') { var lastSlash = dir.lastIndexOf('/');
const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString()); if (lastSlash === -1) {
tsConfig.compilerOptions.module = 'es6'; lastSlash = dir.lastIndexOf('\\');
tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/');
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
} }
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) { if (lastSlash !== -1) {
// Transport the files directly createDirectoryRecursive(dir.substring(0, lastSlash));
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
} }
if (/\.ts$/.test(file)) { seenDir[dir] = true;
// Transform the .ts file try {
let fileContents = fs.readFileSync(path.join(SRC_FOLDER, file)).toString(); fs.mkdirSync(dir);
const info = ts.preProcessFile(fileContents); }
for (let i = info.importedFiles.length - 1; i >= 0; i--) { catch (err) { }
const importedFilename = info.importedFiles[i].fileName; };
const pos = info.importedFiles[i].pos; seenDir[REPO_ROOT] = true;
const end = info.importedFiles[i].end; var toggleComments = function (fileContents) {
let importedFilepath; var lines = fileContents.split(/\r\n|\r|\n/);
if (/^vs\/css!/.test(importedFilename)) { var mode = 0;
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css'; for (var i = 0; i < lines.length; i++) {
var line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
} }
else { if (/\/\/ ESM-uncomment-begin/.test(line)) {
importedFilepath = importedFilename; mode = 2;
continue;
} }
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) { continue;
importedFilepath = path.join(path.dirname(file), importedFilepath);
}
let relativePath;
if (importedFilepath === path.dirname(file).replace(/\\/g, '/')) {
relativePath = '../' + path.basename(path.dirname(file));
}
else if (importedFilepath === path.dirname(path.dirname(file)).replace(/\\/g, '/')) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(file)));
}
else {
relativePath = path.relative(path.dirname(file), importedFilepath);
}
relativePath = relativePath.replace(/\\/g, '/');
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
} }
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) { if (mode === 1) {
return `import * as ${m1} from ${m2};`; if (/\/\/ ESM-comment-end/.test(line)) {
}); mode = 0;
write(getDestAbsoluteFilePath(file), fileContents); continue;
continue; }
} lines[i] = '// ' + line;
console.log(`UNKNOWN FILE: ${file}`); continue;
}
function walkDirRecursive(dir) {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
function _walkDirRecursive(dir, result, trimPos) {
const files = fs.readdirSync(dir);
for (let i = 0; i < files.length; i++) {
const file = path.join(dir, files[i]);
if (fs.statSync(file).isDirectory()) {
_walkDirRecursive(file, result, trimPos);
} }
else { if (mode === 2) {
result.push(file.substr(trimPos)); if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
} }
} }
} return lines.join('\n');
function write(absoluteFilePath, contents) { };
if (/(\.ts$)|(\.js$)/.test(absoluteFilePath)) { var write = function (filePath, contents) {
var absoluteFilePath;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
}
else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString()); contents = toggleComments(contents.toString());
} }
writeFile(absoluteFilePath, contents); fs.writeFileSync(absoluteFilePath, contents);
function toggleComments(fileContents) { };
let lines = fileContents.split(/\r\n|\r|\n/); options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
let mode = 0; while (queue.length > 0) {
for (let i = 0; i < lines.length; i++) { var module_1 = queue.shift();
const line = lines[i]; if (transportCSS(module_1, enqueue, write)) {
if (mode === 0) { continue;
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) {
if (/\/\/ ESM-comment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
if (mode === 2) {
if (/\/\/ ESM-uncomment-end/.test(line)) {
mode = 0;
continue;
}
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
}
}
return lines.join('\n');
} }
if (transportResource(options, module_1, enqueue, write)) {
continue;
}
if (transportDTS(options, module_1, enqueue, write)) {
continue;
}
var filename = void 0;
if (options.redirects[module_1]) {
filename = path.join(SRC_DIR, options.redirects[module_1] + '.ts');
}
else {
filename = path.join(SRC_DIR, module_1 + '.ts');
}
var fileContents = fs.readFileSync(filename).toString();
var info = ts.preProcessFile(fileContents);
for (var i = info.importedFiles.length - 1; i >= 0; i--) {
var importedFilename = info.importedFiles[i].fileName;
var pos = info.importedFiles[i].pos;
var end = info.importedFiles[i].end;
var importedFilepath = void 0;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
}
else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module_1), importedFilepath);
}
enqueue(importedFilepath);
var relativePath = void 0;
if (importedFilepath === path.dirname(module_1)) {
relativePath = '../' + path.basename(path.dirname(module_1));
}
else if (importedFilepath === path.dirname(path.dirname(module_1))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module_1)));
}
else {
relativePath = path.relative(path.dirname(module_1), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1));
}
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return "import * as " + m1 + " from " + m2 + ";";
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module_1 + '.ts', fileContents);
} }
var esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": []
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
var monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
} }
exports.createESMSourcesAndResources2 = createESMSourcesAndResources2; exports.createESMSourcesAndResources = createESMSourcesAndResources;
function transportCSS(module, enqueue, write) { function transportCSS(module, enqueue, write) {
if (!/\.css/.test(module)) { if (!/\.css/.test(module)) {
return false; return false;
} }
const filename = path.join(SRC_DIR, module); var filename = path.join(SRC_DIR, module);
const fileContents = fs.readFileSync(filename).toString(); var fileContents = fs.readFileSync(filename).toString();
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148 var inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336 var inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit); var newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
write(module, newContents); write(module, newContents);
return true; return true;
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) { function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
return _replaceURL(contents, (url) => { return _replaceURL(contents, function (url) {
let imagePath = path.join(path.dirname(module), url); var imagePath = path.join(path.dirname(module), url);
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath)); var fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
if (fileContents.length < inlineByteLimit) { if (fileContents.length < inlineByteLimit) {
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png'; var MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
let DATA = ';base64,' + fileContents.toString('base64'); var DATA = ';base64,' + fileContents.toString('base64');
if (!forceBase64 && /\.svg$/.test(url)) { if (!forceBase64 && /\.svg$/.test(url)) {
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris // .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
let newText = fileContents.toString() var newText = fileContents.toString()
.replace(/"/g, '\'') .replace(/"/g, '\'')
.replace(/</g, '%3C') .replace(/</g, '%3C')
.replace(/>/g, '%3E') .replace(/>/g, '%3E')
.replace(/&/g, '%26') .replace(/&/g, '%26')
.replace(/#/g, '%23') .replace(/#/g, '%23')
.replace(/\s+/g, ' '); .replace(/\s+/g, ' ');
let encodedData = ',' + newText; var encodedData = ',' + newText;
if (encodedData.length < DATA.length) { if (encodedData.length < DATA.length) {
DATA = encodedData; DATA = encodedData;
} }
@@ -283,8 +295,12 @@ function transportCSS(module, enqueue, write) {
} }
function _replaceURL(contents, replacer) { function _replaceURL(contents, replacer) {
// Use ")" as the terminator as quotes are oftentimes not used at all // Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_, ...matches) => { return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, function (_) {
let url = matches[0]; var matches = [];
for (var _i = 1; _i < arguments.length; _i++) {
matches[_i - 1] = arguments[_i];
}
var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured) // Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') { if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1); url = url.substring(1);
@@ -307,3 +323,27 @@ function transportCSS(module, enqueue, write) {
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle; return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
} }
} }
function transportResource(options, module, enqueue, write) {
if (!/\.svg/.test(module)) {
return false;
}
write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}
function transportDTS(options, module, enqueue, write) {
if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}
if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}
write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
var filename;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
}
else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}
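The toggleComments helper above is what lets one source tree serve both the AMD and the ESM builds: regions fenced by ESM-comment-begin/ESM-comment-end are commented out for the ESM output, and regions fenced by ESM-uncomment-begin/ESM-uncomment-end have their leading '// ' stripped. A compact, self-contained sketch of the same marker convention; the import statements in the sample input are made up:

	function toggleCommentsSketch(fileContents: string): string {
		const lines = fileContents.split(/\r\n|\r|\n/);
		let mode = 0; // 0 = copy through, 1 = comment out, 2 = uncomment
		for (let i = 0; i < lines.length; i++) {
			const line = lines[i];
			if (mode === 0) {
				if (/\/\/ ESM-comment-begin/.test(line)) { mode = 1; }
				else if (/\/\/ ESM-uncomment-begin/.test(line)) { mode = 2; }
			} else if (mode === 1) {
				if (/\/\/ ESM-comment-end/.test(line)) { mode = 0; }
				else { lines[i] = '// ' + line; }
			} else {
				if (/\/\/ ESM-uncomment-end/.test(line)) { mode = 0; }
				else { lines[i] = line.replace(/^(\s*)\/\/ ?/, '$1'); }
			}
		}
		return lines.join('\n');
	}

	const amdSource = [
		'// ESM-comment-begin',
		"import * as nls from 'vs/nls';        // AMD-only import (made up)",
		'// ESM-comment-end',
		'// ESM-uncomment-begin',
		"// import * as nls from './nls.esm';  // ESM-only import (made up)",
		'// ESM-uncomment-end'
	].join('\n');

	// the AMD-only import comes out commented; the ESM-only import comes out active
	console.log(toggleCommentsSketch(amdSource));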


@@ -10,6 +10,7 @@ import * as tss from './treeshaking';
const REPO_ROOT = path.join(__dirname, '../../'); const REPO_ROOT = path.join(__dirname, '../../');
const SRC_DIR = path.join(REPO_ROOT, 'src'); const SRC_DIR = path.join(REPO_ROOT, 'src');
const OUT_EDITOR = path.join(REPO_ROOT, 'out-editor');
let dirCache: { [dir: string]: boolean; } = {}; let dirCache: { [dir: string]: boolean; } = {};
@@ -31,33 +32,13 @@ function writeFile(filePath: string, contents: Buffer | string): void {
} }
export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string }): void { export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: string }): void {
const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
let compilerOptions: { [key: string]: any };
if (tsConfig.extends) {
compilerOptions = Object.assign({}, require(path.join(options.sourcesRoot, tsConfig.extends)).compilerOptions, tsConfig.compilerOptions);
} else {
compilerOptions = tsConfig.compilerOptions;
}
tsConfig.compilerOptions = compilerOptions;
compilerOptions.noUnusedLocals = false;
compilerOptions.preserveConstEnums = false;
compilerOptions.declaration = false;
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
delete compilerOptions.types;
delete tsConfig.extends;
tsConfig.exclude = [];
options.compilerOptions = compilerOptions;
let result = tss.shake(options); let result = tss.shake(options);
for (let fileName in result) { for (let fileName in result) {
if (result.hasOwnProperty(fileName)) { if (result.hasOwnProperty(fileName)) {
writeFile(path.join(options.destRoot, fileName), result[fileName]); writeFile(path.join(options.destRoot, fileName), result[fileName]);
} }
} }
let copied: { [fileName: string]: boolean; } = {}; let copied: { [fileName:string]: boolean; } = {};
const copyFile = (fileName: string) => { const copyFile = (fileName: string) => {
if (copied[fileName]) { if (copied[fileName]) {
return; return;
@@ -67,7 +48,7 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
const dstPath = path.join(options.destRoot, fileName); const dstPath = path.join(options.destRoot, fileName);
writeFile(dstPath, fs.readFileSync(srcPath)); writeFile(dstPath, fs.readFileSync(srcPath));
}; };
const writeOutputFile = (fileName: string, contents: string | Buffer) => { const writeOutputFile = (fileName: string, contents: string) => {
writeFile(path.join(options.destRoot, fileName), contents); writeFile(path.join(options.destRoot, fileName), contents);
}; };
for (let fileName in result) { for (let fileName in result) {
@@ -99,7 +80,8 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
} }
} }
delete tsConfig.compilerOptions.moduleResolution; const tsConfig = JSON.parse(fs.readFileSync(path.join(options.sourcesRoot, 'tsconfig.json')).toString());
tsConfig.compilerOptions.noUnusedLocals = false;
writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t')); writeOutputFile('tsconfig.json', JSON.stringify(tsConfig, null, '\t'));
[ [
@@ -107,177 +89,203 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
'vs/css.d.ts', 'vs/css.d.ts',
'vs/css.js', 'vs/css.js',
'vs/loader.js', 'vs/loader.js',
'vs/monaco.d.ts',
'vs/nls.build.js', 'vs/nls.build.js',
'vs/nls.d.ts', 'vs/nls.d.ts',
'vs/nls.js', 'vs/nls.js',
'vs/nls.mock.ts', 'vs/nls.mock.ts',
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require.d.ts',
].forEach(copyFile); ].forEach(copyFile);
} }
export interface IOptions2 { export interface IOptions {
srcFolder: string; entryPoints: string[];
outFolder: string; outFolder: string;
outResourcesFolder: string; outResourcesFolder: string;
ignores: string[]; redirects: { [module: string]: string; };
renames: { [filename: string]: string; };
} }
export function createESMSourcesAndResources2(options: IOptions2): void { export function createESMSourcesAndResources(options: IOptions): void {
const SRC_FOLDER = path.join(REPO_ROOT, options.srcFolder);
const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder); const OUT_FOLDER = path.join(REPO_ROOT, options.outFolder);
const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder); const OUT_RESOURCES_FOLDER = path.join(REPO_ROOT, options.outResourcesFolder);
const getDestAbsoluteFilePath = (file: string): string => { let in_queue: { [module: string]: boolean; } = Object.create(null);
let dest = options.renames[file.replace(/\\/g, '/')] || file; let queue: string[] = [];
if (dest === 'tsconfig.json') {
return path.join(OUT_FOLDER, `tsconfig.json`); const enqueue = (module: string) => {
if (in_queue[module]) {
return;
} }
if (/\.ts$/.test(dest)) { in_queue[module] = true;
return path.join(OUT_FOLDER, dest); queue.push(module);
}
return path.join(OUT_RESOURCES_FOLDER, dest);
}; };
const allFiles = walkDirRecursive(SRC_FOLDER); const seenDir: { [key: string]: boolean; } = {};
for (const file of allFiles) { const createDirectoryRecursive = (dir: string) => {
if (seenDir[dir]) {
if (options.ignores.indexOf(file.replace(/\\/g, '/')) >= 0) { return;
continue;
} }
if (file === 'tsconfig.json') { let lastSlash = dir.lastIndexOf('/');
const tsConfig = JSON.parse(fs.readFileSync(path.join(SRC_FOLDER, file)).toString()); if (lastSlash === -1) {
tsConfig.compilerOptions.module = 'es6'; lastSlash = dir.lastIndexOf('\\');
tsConfig.compilerOptions.outDir = path.join(path.relative(OUT_FOLDER, OUT_RESOURCES_FOLDER), 'vs').replace(/\\/g, '/');
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
continue;
} }
if (lastSlash !== -1) {
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) { createDirectoryRecursive(dir.substring(0, lastSlash));
// Transport the files directly
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
continue;
} }
seenDir[dir] = true;
try { fs.mkdirSync(dir); } catch (err) { }
};
if (/\.ts$/.test(file)) { seenDir[REPO_ROOT] = true;
// Transform the .ts file
let fileContents = fs.readFileSync(path.join(SRC_FOLDER, file)).toString();
const info = ts.preProcessFile(fileContents); const toggleComments = (fileContents: string) => {
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
for (let i = info.importedFiles.length - 1; i >= 0; i--) { if (mode === 0) {
const importedFilename = info.importedFiles[i].fileName; if (/\/\/ ESM-comment-begin/.test(line)) {
const pos = info.importedFiles[i].pos; mode = 1;
const end = info.importedFiles[i].end; continue;
let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
} }
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) { if (/\/\/ ESM-uncomment-begin/.test(line)) {
importedFilepath = path.join(path.dirname(file), importedFilepath); mode = 2;
continue;
} }
continue;
let relativePath: string;
if (importedFilepath === path.dirname(file).replace(/\\/g, '/')) {
relativePath = '../' + path.basename(path.dirname(file));
} else if (importedFilepath === path.dirname(path.dirname(file)).replace(/\\/g, '/')) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(file)));
} else {
relativePath = path.relative(path.dirname(file), importedFilepath);
}
relativePath = relativePath.replace(/\\/g, '/');
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
} }
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) { if (mode === 1) {
return `import * as ${m1} from ${m2};`; if (/\/\/ ESM-comment-end/.test(line)) {
}); mode = 0;
continue;
}
lines[i] = '// ' + line;
continue;
}
write(getDestAbsoluteFilePath(file), fileContents); if (mode === 2) {
continue; if (/\/\/ ESM-uncomment-end/.test(line)) {
} mode = 0;
continue;
console.log(`UNKNOWN FILE: ${file}`); }
} lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) {
return indent;
});
function walkDirRecursive(dir: string): string[] {
if (dir.charAt(dir.length - 1) !== '/' || dir.charAt(dir.length - 1) !== '\\') {
dir += '/';
}
let result: string[] = [];
_walkDirRecursive(dir, result, dir.length);
return result;
}
function _walkDirRecursive(dir: string, result: string[], trimPos: number): void {
const files = fs.readdirSync(dir);
for (let i = 0; i < files.length; i++) {
const file = path.join(dir, files[i]);
if (fs.statSync(file).isDirectory()) {
_walkDirRecursive(file, result, trimPos);
} else {
result.push(file.substr(trimPos));
} }
} }
}
function write(absoluteFilePath: string, contents: string | Buffer): void { return lines.join('\n');
if (/(\.ts$)|(\.js$)/.test(absoluteFilePath)) { };
const write = (filePath: string, contents: string | Buffer) => {
let absoluteFilePath: string;
if (/\.ts$/.test(filePath)) {
absoluteFilePath = path.join(OUT_FOLDER, filePath);
} else {
absoluteFilePath = path.join(OUT_RESOURCES_FOLDER, filePath);
}
createDirectoryRecursive(path.dirname(absoluteFilePath));
if (/(\.ts$)|(\.js$)/.test(filePath)) {
contents = toggleComments(contents.toString()); contents = toggleComments(contents.toString());
} }
writeFile(absoluteFilePath, contents); fs.writeFileSync(absoluteFilePath, contents);
};
function toggleComments(fileContents: string): string { options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
let lines = fileContents.split(/\r\n|\r|\n/);
let mode = 0;
for (let i = 0; i < lines.length; i++) {
const line = lines[i];
if (mode === 0) {
if (/\/\/ ESM-comment-begin/.test(line)) {
mode = 1;
continue;
}
if (/\/\/ ESM-uncomment-begin/.test(line)) {
mode = 2;
continue;
}
continue;
}
if (mode === 1) { while (queue.length > 0) {
if (/\/\/ ESM-comment-end/.test(line)) { const module = queue.shift();
mode = 0; if (transportCSS(module, enqueue, write)) {
continue; continue;
} }
lines[i] = '// ' + line; if (transportResource(options, module, enqueue, write)) {
continue; continue;
} }
if (transportDTS(options, module, enqueue, write)) {
continue;
}
if (mode === 2) { let filename: string;
if (/\/\/ ESM-uncomment-end/.test(line)) { if (options.redirects[module]) {
mode = 0; filename = path.join(SRC_DIR, options.redirects[module] + '.ts');
continue; } else {
} filename = path.join(SRC_DIR, module + '.ts');
lines[i] = line.replace(/^(\s*)\/\/ ?/, function (_, indent) { }
return indent; let fileContents = fs.readFileSync(filename).toString();
});
} const info = ts.preProcessFile(fileContents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFilename = info.importedFiles[i].fileName;
const pos = info.importedFiles[i].pos;
const end = info.importedFiles[i].end;
let importedFilepath: string;
if (/^vs\/css!/.test(importedFilename)) {
importedFilepath = importedFilename.substr('vs/css!'.length) + '.css';
} else {
importedFilepath = importedFilename;
}
if (/(^\.\/)|(^\.\.\/)/.test(importedFilepath)) {
importedFilepath = path.join(path.dirname(module), importedFilepath);
} }
return lines.join('\n'); enqueue(importedFilepath);
let relativePath: string;
if (importedFilepath === path.dirname(module)) {
relativePath = '../' + path.basename(path.dirname(module));
} else if (importedFilepath === path.dirname(path.dirname(module))) {
relativePath = '../../' + path.basename(path.dirname(path.dirname(module)));
} else {
relativePath = path.relative(path.dirname(module), importedFilepath);
}
if (!/(^\.\/)|(^\.\.\/)/.test(relativePath)) {
relativePath = './' + relativePath;
}
fileContents = (
fileContents.substring(0, pos + 1)
+ relativePath
+ fileContents.substring(end + 1)
);
} }
fileContents = fileContents.replace(/import ([a-zA-z0-9]+) = require\(('[^']+')\);/g, function (_, m1, m2) {
return `import * as ${m1} from ${m2};`;
});
fileContents = fileContents.replace(/Thenable/g, 'PromiseLike');
write(module + '.ts', fileContents);
} }
const esm_opts = {
"compilerOptions": {
"outDir": path.relative(path.dirname(OUT_FOLDER), OUT_RESOURCES_FOLDER),
"rootDir": "src",
"module": "es6",
"target": "es5",
"experimentalDecorators": true,
"lib": [
"dom",
"es5",
"es2015.collection",
"es2015.promise"
],
"types": [
]
}
};
fs.writeFileSync(path.join(path.dirname(OUT_FOLDER), 'tsconfig.json'), JSON.stringify(esm_opts, null, '\t'));
const monacodts = fs.readFileSync(path.join(SRC_DIR, 'vs/monaco.d.ts')).toString();
fs.writeFileSync(path.join(OUT_FOLDER, 'vs/monaco.d.ts'), monacodts);
} }
function transportCSS(module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean { function transportCSS(module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
@@ -329,7 +337,7 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
function _replaceURL(contents: string, replacer: (url: string) => string): string { function _replaceURL(contents: string, replacer: (url: string) => string): string {
// Use ")" as the terminator as quotes are oftentimes not used at all // Use ")" as the terminator as quotes are oftentimes not used at all
return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => { return contents.replace(/url\(\s*([^\)]+)\s*\)?/g, (_: string, ...matches: string[]) => {
let url = matches[0]; var url = matches[0];
// Eliminate starting quotes (the initial whitespace is not captured) // Eliminate starting quotes (the initial whitespace is not captured)
if (url.charAt(0) === '"' || url.charAt(0) === '\'') { if (url.charAt(0) === '"' || url.charAt(0) === '\'') {
url = url.substring(1); url = url.substring(1);
@@ -355,3 +363,33 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle; return haystack.length >= needle.length && haystack.substr(0, needle.length) === needle;
} }
} }
function transportResource(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (!/\.svg/.test(module)) {
return false;
}
write(module, fs.readFileSync(path.join(SRC_DIR, module)));
return true;
}
function transportDTS(options: IOptions, module: string, enqueue: (module: string) => void, write: (path: string, contents: string | Buffer) => void): boolean {
if (options.redirects[module] && fs.existsSync(path.join(SRC_DIR, options.redirects[module] + '.ts'))) {
return false;
}
if (!fs.existsSync(path.join(SRC_DIR, module + '.d.ts'))) {
return false;
}
write(module + '.d.ts', fs.readFileSync(path.join(SRC_DIR, module + '.d.ts')));
let filename: string;
if (options.redirects[module]) {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, options.redirects[module] + '.js')));
} else {
write(module + '.js', fs.readFileSync(path.join(SRC_DIR, module + '.js')));
}
return true;
}
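transportCSS above rewrites url() references in the editor's CSS so that small images travel inside the stylesheet as base64 data URIs (SVGs may instead be URL-encoded when that is smaller). A minimal sketch of the base64 branch of that idea; the file paths, glob and default byte limit here are illustrative:

	import * as fs from 'fs';
	import * as path from 'path';

	function inlineCssUrlsSketch(cssFilePath: string, inlineByteLimit = 300000): string {
		const contents = fs.readFileSync(cssFilePath).toString();
		return contents.replace(/url\(\s*([^\)]+)\s*\)/g, (match, rawUrl: string) => {
			const url = rawUrl.trim().replace(/^['"]|['"]$/g, ''); // strip optional quotes
			if (/^(data:|https?:)/.test(url)) {
				return match; // already inlined or remote - leave it alone
			}
			const imagePath = path.join(path.dirname(cssFilePath), url);
			const data = fs.readFileSync(imagePath);
			if (data.length >= inlineByteLimit) {
				return match; // too large - keep the original reference
			}
			const mime = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
			return `url("data:${mime};base64,${data.toString('base64')}")`;
		});
	}

	// e.g. fs.writeFileSync('out/editor.css', inlineCssUrlsSketch('src/editor.css'));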


@@ -1,135 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const es = require("event-stream");
const util = require("gulp-util");
const appInsights = require("applicationinsights");
class Entry {
constructor(name, totalCount, totalSize) {
this.name = name;
this.totalCount = totalCount;
this.totalSize = totalSize;
}
toString(pretty) {
if (!pretty) {
if (this.totalCount === 1) {
return `${this.name}: ${this.totalSize} bytes`;
}
else {
return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`;
}
}
else {
if (this.totalCount === 1) {
return `Stats for '${util.colors.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`;
}
else {
const count = this.totalCount < 100
? util.colors.green(this.totalCount.toString())
: util.colors.red(this.totalCount.toString());
return `Stats for '${util.colors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`;
}
}
}
}
const _entries = new Map();
function createStatsStream(group, log) {
const entry = new Entry(group, 0, 0);
_entries.set(entry.name, entry);
return es.through(function (data) {
const file = data;
if (typeof file.path === 'string') {
entry.totalCount += 1;
if (Buffer.isBuffer(file.contents)) {
entry.totalSize += file.contents.length;
}
else if (file.stat && typeof file.stat.size === 'number') {
entry.totalSize += file.stat.size;
}
else {
// funky file...
}
}
this.emit('data', data);
}, function () {
if (log) {
if (entry.totalCount === 1) {
util.log(`Stats for '${util.colors.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`);
}
else {
const count = entry.totalCount < 100
? util.colors.green(entry.totalCount.toString())
: util.colors.red(entry.totalCount.toString());
util.log(`Stats for '${util.colors.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`);
}
}
this.emit('end');
});
}
exports.createStatsStream = createStatsStream;
function submitAllStats(productJson, commit) {
const sorted = [];
// move entries for single files to the front
_entries.forEach(value => {
if (value.totalCount === 1) {
sorted.unshift(value);
}
else {
sorted.push(value);
}
});
// print to console
for (const entry of sorted) {
console.log(entry.toString(true));
}
// send data as telementry event when the
// product is configured to send telemetry
if (!productJson || !productJson.aiConfig || typeof productJson.aiConfig.asimovKey !== 'string') {
return Promise.resolve(false);
}
return new Promise(resolve => {
try {
const sizes = {};
const counts = {};
for (const entry of sorted) {
sizes[entry.name] = entry.totalSize;
counts[entry.name] = entry.totalCount;
}
appInsights.setup(productJson.aiConfig.asimovKey)
.setAutoCollectConsole(false)
.setAutoCollectExceptions(false)
.setAutoCollectPerformance(false)
.setAutoCollectRequests(false)
.setAutoCollectDependencies(false)
.setAutoDependencyCorrelation(false)
.start();
appInsights.defaultClient.config.endpointUrl = 'https://vortex.data.microsoft.com/collect/v1';
/* __GDPR__
"monacoworkbench/packagemetrics" : {
"commit" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"size" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"count" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
}
*/
appInsights.defaultClient.trackEvent({
name: 'monacoworkbench/packagemetrics',
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
});
appInsights.defaultClient.flush({
callback: () => {
appInsights.dispose();
resolve(true);
}
});
}
catch (err) {
console.error('ERROR sending build stats as telemetry event!');
console.error(err);
resolve(false);
}
});
}
exports.submitAllStats = submitAllStats;


@@ -1,147 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as es from 'event-stream';
import * as util from 'gulp-util';
import * as File from 'vinyl';
import * as appInsights from 'applicationinsights';
class Entry {
constructor(readonly name: string, public totalCount: number, public totalSize: number) { }
toString(pretty?: boolean): string {
if (!pretty) {
if (this.totalCount === 1) {
return `${this.name}: ${this.totalSize} bytes`;
} else {
return `${this.name}: ${this.totalCount} files with ${this.totalSize} bytes`;
}
} else {
if (this.totalCount === 1) {
return `Stats for '${util.colors.grey(this.name)}': ${Math.round(this.totalSize / 1204)}KB`;
} else {
const count = this.totalCount < 100
? util.colors.green(this.totalCount.toString())
: util.colors.red(this.totalCount.toString());
return `Stats for '${util.colors.grey(this.name)}': ${count} files, ${Math.round(this.totalSize / 1204)}KB`;
}
}
}
}
const _entries = new Map<string, Entry>();
export function createStatsStream(group: string, log?: boolean): es.ThroughStream {
const entry = new Entry(group, 0, 0);
_entries.set(entry.name, entry);
return es.through(function (data) {
const file = data as File;
if (typeof file.path === 'string') {
entry.totalCount += 1;
if (Buffer.isBuffer(file.contents)) {
entry.totalSize += file.contents.length;
} else if (file.stat && typeof file.stat.size === 'number') {
entry.totalSize += file.stat.size;
} else {
// funky file...
}
}
this.emit('data', data);
}, function () {
if (log) {
if (entry.totalCount === 1) {
util.log(`Stats for '${util.colors.grey(entry.name)}': ${Math.round(entry.totalSize / 1204)}KB`);
} else {
const count = entry.totalCount < 100
? util.colors.green(entry.totalCount.toString())
: util.colors.red(entry.totalCount.toString());
util.log(`Stats for '${util.colors.grey(entry.name)}': ${count} files, ${Math.round(entry.totalSize / 1204)}KB`);
}
}
this.emit('end');
});
}
export function submitAllStats(productJson: any, commit: string): Promise<boolean> {
const sorted: Entry[] = [];
// move entries for single files to the front
_entries.forEach(value => {
if (value.totalCount === 1) {
sorted.unshift(value);
} else {
sorted.push(value);
}
});
// print to console
for (const entry of sorted) {
console.log(entry.toString(true));
}
// send data as telementry event when the
// product is configured to send telemetry
if (!productJson || !productJson.aiConfig || typeof productJson.aiConfig.asimovKey !== 'string') {
return Promise.resolve(false);
}
return new Promise(resolve => {
try {
const sizes: any = {};
const counts: any = {};
for (const entry of sorted) {
sizes[entry.name] = entry.totalSize;
counts[entry.name] = entry.totalCount;
}
appInsights.setup(productJson.aiConfig.asimovKey)
.setAutoCollectConsole(false)
.setAutoCollectExceptions(false)
.setAutoCollectPerformance(false)
.setAutoCollectRequests(false)
.setAutoCollectDependencies(false)
.setAutoDependencyCorrelation(false)
.start();
appInsights.defaultClient.config.endpointUrl = 'https://vortex.data.microsoft.com/collect/v1';
/* __GDPR__
"monacoworkbench/packagemetrics" : {
"commit" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"size" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" },
"count" : {"classification": "SystemMetaData", "purpose": "PerformanceAndHealth" }
}
*/
appInsights.defaultClient.trackEvent({
name: 'monacoworkbench/packagemetrics',
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
});
appInsights.defaultClient.flush({
callback: () => {
appInsights.dispose();
resolve(true);
}
});
} catch (err) {
console.error('ERROR sending build stats as telemetry event!');
console.error(err);
resolve(false);
}
});
}
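
For context, a minimal sketch of how the stream returned by createStatsStream could sit in a gulp pipeline; the task name, glob and group label are invented for this example and are not part of the build shown in this diff.

import * as gulp from 'gulp';
import { createStatsStream } from './stats';                 // import path assumed

gulp.task('collect-out-stats', () => {                       // task name is a placeholder
    return gulp.src('out/**/*.js')                           // glob is a placeholder
        .pipe(createStatsStream('out-js', true));            // logs "Stats for 'out-js': ..." when the stream ends
});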

View File

@@ -4,30 +4,30 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
-const assert = require("assert");
+var assert = require("assert");
-const i18n = require("../i18n");
+var i18n = require("../i18n");
-suite('XLF Parser Tests', () => {
+suite('XLF Parser Tests', function () {
-    const sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>';
+    var sampleXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source></trans-unit></body></file></xliff>';
-    const sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>';
+    var sampleTranslatedXlf = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit><trans-unit id="key2"><source xml:lang="en">Key #2 &amp;</source><target>Кнопка #2 &amp;</target></trans-unit></body></file></xliff>';
-    const originalFilePath = 'vs/base/common/keybinding';
+    var originalFilePath = 'vs/base/common/keybinding';
-    const keys = ['key1', 'key2'];
+    var keys = ['key1', 'key2'];
-    const messages = ['Key #1', 'Key #2 &'];
+    var messages = ['Key #1', 'Key #2 &'];
-    const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
+    var translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
-    test('Keys & messages to XLF conversion', () => {
+    test('Keys & messages to XLF conversion', function () {
-        const xlf = new i18n.XLF('vscode-workbench');
+        var xlf = new i18n.XLF('vscode-workbench');
        xlf.addFile(originalFilePath, keys, messages);
-        const xlfString = xlf.toString();
+        var xlfString = xlf.toString();
        assert.strictEqual(xlfString.replace(/\s{2,}/g, ''), sampleXlf);
    });
-    test('XLF to keys & messages conversion', () => {
+    test('XLF to keys & messages conversion', function () {
        i18n.XLF.parse(sampleTranslatedXlf).then(function (resolvedFiles) {
            assert.deepEqual(resolvedFiles[0].messages, translatedMessages);
            assert.strictEqual(resolvedFiles[0].originalFilePath, originalFilePath);
        });
    });
-    test('JSON file source path to Transifex resource match', () => {
+    test('JSON file source path to Transifex resource match', function () {
-        const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
+        var editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench';
-        const platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
+        var platform = { name: 'vs/platform', project: editorProject }, editorContrib = { name: 'vs/editor/contrib', project: editorProject }, editor = { name: 'vs/editor', project: editorProject }, base = { name: 'vs/base', project: editorProject }, code = { name: 'vs/code', project: workbenchProject }, workbenchParts = { name: 'vs/workbench/parts/html', project: workbenchProject }, workbenchServices = { name: 'vs/workbench/services/files', project: workbenchProject }, workbench = { name: 'vs/workbench', project: workbenchProject };
        assert.deepEqual(i18n.getResource('vs/platform/actions/browser/menusExtensionPoint'), platform);
        assert.deepEqual(i18n.getResource('vs/editor/contrib/clipboard/browser/clipboard'), editorContrib);
        assert.deepEqual(i18n.getResource('vs/editor/common/modes/modesRegistry'), editor);
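
As a plain, non-test illustration of the API these tests exercise (only calls that appear above are used; the module path and the trimmed translated sample are assumptions for the sketch):

import * as i18n from './i18n';                              // module path assumed

// English strings go in through addFile and come out as an .xlf document.
const xlf = new i18n.XLF('vscode-workbench');
xlf.addFile('vs/base/common/keybinding', ['key1', 'key2'], ['Key #1', 'Key #2 &']);
console.log(xlf.toString());

// A translated .xlf (note the <target> elements) is turned back into
// per-file message maps by the static parse method.
const translated = '<?xml version="1.0" encoding="utf-8"?><xliff version="1.2" xmlns="urn:oasis:names:tc:xliff:document:1.2"><file original="vs/base/common/keybinding" source-language="en" target-language="ru" datatype="plaintext"><body><trans-unit id="key1"><source xml:lang="en">Key #1</source><target>Кнопка #1</target></trans-unit></body></file></xliff>';
i18n.XLF.parse(translated).then(resolvedFiles => {
    console.log(resolvedFiles[0].originalFilePath, resolvedFiles[0].messages);   // { key1: 'Кнопка #1' }
});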

View File

@@ -15,7 +15,7 @@ suite('XLF Parser Tests', () => {
	const translatedMessages = { key1: 'Кнопка #1', key2: 'Кнопка #2 &' };
	test('Keys & messages to XLF conversion', () => {
-		const xlf = new i18n.XLF('vscode-workbench');
+		let xlf = new i18n.XLF('vscode-workbench');
		xlf.addFile(originalFilePath, keys, messages);
		const xlfString = xlf.toString();

View File

@@ -4,48 +4,18 @@
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
'use strict'; 'use strict';
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs"); var fs = require("fs");
const path = require("path"); var path = require("path");
const ts = require("typescript"); var ts = require("typescript");
const TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts')); var TYPESCRIPT_LIB_FOLDER = path.dirname(require.resolve('typescript/lib/lib.d.ts'));
var ShakeLevel; var ShakeLevel;
(function (ShakeLevel) { (function (ShakeLevel) {
ShakeLevel[ShakeLevel["Files"] = 0] = "Files"; ShakeLevel[ShakeLevel["Files"] = 0] = "Files";
ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile"; ShakeLevel[ShakeLevel["InnerFile"] = 1] = "InnerFile";
ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers"; ShakeLevel[ShakeLevel["ClassMembers"] = 2] = "ClassMembers";
})(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {})); })(ShakeLevel = exports.ShakeLevel || (exports.ShakeLevel = {}));
function printDiagnostics(diagnostics) {
for (const diag of diagnostics) {
let result = '';
if (diag.file) {
result += `${diag.file.fileName}: `;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `- ${location.line + 1},${location.character} - `;
}
result += JSON.stringify(diag.messageText);
console.log(result);
}
}
function shake(options) { function shake(options) {
const languageService = createTypeScriptLanguageService(options); var languageService = createTypeScriptLanguageService(options);
const program = languageService.getProgram();
const globalDiagnostics = program.getGlobalDiagnostics();
if (globalDiagnostics.length > 0) {
printDiagnostics(globalDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const syntacticDiagnostics = program.getSyntacticDiagnostics();
if (syntacticDiagnostics.length > 0) {
printDiagnostics(syntacticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const semanticDiagnostics = program.getSemanticDiagnostics();
if (semanticDiagnostics.length > 0) {
printDiagnostics(semanticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
markNodes(languageService, options); markNodes(languageService, options);
return generateResult(languageService, options.shakeLevel); return generateResult(languageService, options.shakeLevel);
} }
@@ -53,104 +23,93 @@ exports.shake = shake;
//#region Discovery, LanguageService & Setup //#region Discovery, LanguageService & Setup
function createTypeScriptLanguageService(options) { function createTypeScriptLanguageService(options) {
// Discover referenced files // Discover referenced files
const FILES = discoverAndReadFiles(options); var FILES = discoverAndReadFiles(options);
// Add fake usage files // Add fake usage files
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => { options.inlineEntryPoints.forEach(function (inlineEntryPoint, index) {
FILES[`inlineEntryPoint.${index}.ts`] = inlineEntryPoint; FILES["inlineEntryPoint:" + index + ".ts"] = inlineEntryPoint;
});
// Add additional typings
options.typings.forEach((typing) => {
const filePath = path.join(options.sourcesRoot, typing);
FILES[typing] = fs.readFileSync(filePath).toString();
}); });
// Resolve libs // Resolve libs
const RESOLVED_LIBS = {}; var RESOLVED_LIBS = {};
options.libs.forEach((filename) => { options.libs.forEach(function (filename) {
const filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename); var filepath = path.join(TYPESCRIPT_LIB_FOLDER, filename);
RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString(); RESOLVED_LIBS["defaultLib:" + filename] = fs.readFileSync(filepath).toString();
}); });
const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options; var host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, compilerOptions);
return ts.createLanguageService(host); return ts.createLanguageService(host);
} }
/** /**
* Read imports and follow them until all files have been handled * Read imports and follow them until all files have been handled
*/ */
function discoverAndReadFiles(options) { function discoverAndReadFiles(options) {
const FILES = {}; var FILES = {};
const in_queue = Object.create(null); var in_queue = Object.create(null);
const queue = []; var queue = [];
const enqueue = (moduleId) => { var enqueue = function (moduleId) {
if (in_queue[moduleId]) { if (in_queue[moduleId]) {
return; return;
} }
in_queue[moduleId] = true; in_queue[moduleId] = true;
queue.push(moduleId); queue.push(moduleId);
}; };
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint)); options.entryPoints.forEach(function (entryPoint) { return enqueue(entryPoint); });
while (queue.length > 0) { while (queue.length > 0) {
const moduleId = queue.shift(); var moduleId = queue.shift();
const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts'); var dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
if (fs.existsSync(dts_filename)) { if (fs.existsSync(dts_filename)) {
const dts_filecontents = fs.readFileSync(dts_filename).toString(); var dts_filecontents = fs.readFileSync(dts_filename).toString();
FILES[`${moduleId}.d.ts`] = dts_filecontents; FILES[moduleId + '.d.ts'] = dts_filecontents;
continue; continue;
} }
const js_filename = path.join(options.sourcesRoot, moduleId + '.js'); var ts_filename = void 0;
if (fs.existsSync(js_filename)) {
// This is an import for a .js file, so ignore it...
continue;
}
let ts_filename;
if (options.redirects[moduleId]) { if (options.redirects[moduleId]) {
ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts'); ts_filename = path.join(options.sourcesRoot, options.redirects[moduleId] + '.ts');
} }
else { else {
ts_filename = path.join(options.sourcesRoot, moduleId + '.ts'); ts_filename = path.join(options.sourcesRoot, moduleId + '.ts');
} }
const ts_filecontents = fs.readFileSync(ts_filename).toString(); var ts_filecontents = fs.readFileSync(ts_filename).toString();
const info = ts.preProcessFile(ts_filecontents); var info = ts.preProcessFile(ts_filecontents);
for (let i = info.importedFiles.length - 1; i >= 0; i--) { for (var i = info.importedFiles.length - 1; i >= 0; i--) {
const importedFileName = info.importedFiles[i].fileName; var importedFileName = info.importedFiles[i].fileName;
if (options.importIgnorePattern.test(importedFileName)) { if (options.importIgnorePattern.test(importedFileName)) {
// Ignore vs/css! imports // Ignore vs/css! imports
continue; continue;
} }
let importedModuleId = importedFileName; var importedModuleId = importedFileName;
if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) { if (/(^\.\/)|(^\.\.\/)/.test(importedModuleId)) {
importedModuleId = path.join(path.dirname(moduleId), importedModuleId); importedModuleId = path.join(path.dirname(moduleId), importedModuleId);
} }
enqueue(importedModuleId); enqueue(importedModuleId);
} }
FILES[`${moduleId}.ts`] = ts_filecontents; FILES[moduleId + '.ts'] = ts_filecontents;
} }
return FILES; return FILES;
} }
/** /**
* A TypeScript language service host * A TypeScript language service host
*/ */
class TypeScriptLanguageServiceHost { var TypeScriptLanguageServiceHost = /** @class */ (function () {
constructor(libs, files, compilerOptions) { function TypeScriptLanguageServiceHost(libs, files, compilerOptions) {
this._libs = libs; this._libs = libs;
this._files = files; this._files = files;
this._compilerOptions = compilerOptions; this._compilerOptions = compilerOptions;
} }
// --- language service host --------------- // --- language service host ---------------
getCompilationSettings() { TypeScriptLanguageServiceHost.prototype.getCompilationSettings = function () {
return this._compilerOptions; return this._compilerOptions;
} };
getScriptFileNames() { TypeScriptLanguageServiceHost.prototype.getScriptFileNames = function () {
return ([] return ([]
.concat(Object.keys(this._libs)) .concat(Object.keys(this._libs))
.concat(Object.keys(this._files))); .concat(Object.keys(this._files)));
} };
getScriptVersion(_fileName) { TypeScriptLanguageServiceHost.prototype.getScriptVersion = function (fileName) {
return '1'; return '1';
} };
getProjectVersion() { TypeScriptLanguageServiceHost.prototype.getProjectVersion = function () {
return '1'; return '1';
} };
getScriptSnapshot(fileName) { TypeScriptLanguageServiceHost.prototype.getScriptSnapshot = function (fileName) {
if (this._files.hasOwnProperty(fileName)) { if (this._files.hasOwnProperty(fileName)) {
return ts.ScriptSnapshot.fromString(this._files[fileName]); return ts.ScriptSnapshot.fromString(this._files[fileName]);
} }
@@ -160,20 +119,21 @@ class TypeScriptLanguageServiceHost {
else { else {
return ts.ScriptSnapshot.fromString(''); return ts.ScriptSnapshot.fromString('');
} }
} };
getScriptKind(_fileName) { TypeScriptLanguageServiceHost.prototype.getScriptKind = function (fileName) {
return ts.ScriptKind.TS; return ts.ScriptKind.TS;
} };
getCurrentDirectory() { TypeScriptLanguageServiceHost.prototype.getCurrentDirectory = function () {
return ''; return '';
} };
getDefaultLibFileName(_options) { TypeScriptLanguageServiceHost.prototype.getDefaultLibFileName = function (options) {
return 'defaultLib:lib.d.ts'; return 'defaultLib:lib.d.ts';
} };
isDefaultLibFileName(fileName) { TypeScriptLanguageServiceHost.prototype.isDefaultLibFileName = function (fileName) {
return fileName === this.getDefaultLibFileName(this._compilerOptions); return fileName === this.getDefaultLibFileName(this._compilerOptions);
} };
} return TypeScriptLanguageServiceHost;
}());
//#endregion //#endregion
//#region Tree Shaking //#region Tree Shaking
var NodeColor; var NodeColor;
@@ -190,7 +150,7 @@ function setColor(node, color) {
} }
function nodeOrParentIsBlack(node) { function nodeOrParentIsBlack(node) {
while (node) { while (node) {
const color = getColor(node); var color = getColor(node);
if (color === 2 /* Black */) { if (color === 2 /* Black */) {
return true; return true;
} }
@@ -202,7 +162,8 @@ function nodeOrChildIsBlack(node) {
if (getColor(node) === 2 /* Black */) { if (getColor(node) === 2 /* Black */) {
return true; return true;
} }
for (const child of node.getChildren()) { for (var _i = 0, _a = node.getChildren(); _i < _a.length; _i++) {
var child = _a[_i];
if (nodeOrChildIsBlack(child)) { if (nodeOrChildIsBlack(child)) {
return true; return true;
} }
@@ -210,22 +171,19 @@ function nodeOrChildIsBlack(node) {
return false; return false;
} }
function markNodes(languageService, options) { function markNodes(languageService, options) {
const program = languageService.getProgram(); var program = languageService.getProgram();
if (!program) {
throw new Error('Could not get program from language service');
}
if (options.shakeLevel === 0 /* Files */) { if (options.shakeLevel === 0 /* Files */) {
// Mark all source files Black // Mark all source files Black
program.getSourceFiles().forEach((sourceFile) => { program.getSourceFiles().forEach(function (sourceFile) {
setColor(sourceFile, 2 /* Black */); setColor(sourceFile, 2 /* Black */);
}); });
return; return;
} }
const black_queue = []; var black_queue = [];
const gray_queue = []; var gray_queue = [];
const sourceFilesLoaded = {}; var sourceFilesLoaded = {};
function enqueueTopLevelModuleStatements(sourceFile) { function enqueueTopLevelModuleStatements(sourceFile) {
sourceFile.forEachChild((node) => { sourceFile.forEachChild(function (node) {
if (ts.isImportDeclaration(node)) { if (ts.isImportDeclaration(node)) {
if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) { if (!node.importClause && ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* Black */); setColor(node, 2 /* Black */);
@@ -234,7 +192,7 @@ function markNodes(languageService, options) {
return; return;
} }
if (ts.isExportDeclaration(node)) { if (ts.isExportDeclaration(node)) {
if (node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) { if (ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, 2 /* Black */); setColor(node, 2 /* Black */);
enqueueImport(node, node.moduleSpecifier.text); enqueueImport(node, node.moduleSpecifier.text);
} }
@@ -262,7 +220,7 @@ function markNodes(languageService, options) {
gray_queue.push(node); gray_queue.push(node);
} }
function enqueue_black(node) { function enqueue_black(node) {
const previousColor = getColor(node); var previousColor = getColor(node);
if (previousColor === 2 /* Black */) { if (previousColor === 2 /* Black */) {
return; return;
} }
@@ -280,12 +238,12 @@ function markNodes(languageService, options) {
if (nodeOrParentIsBlack(node)) { if (nodeOrParentIsBlack(node)) {
return; return;
} }
const fileName = node.getSourceFile().fileName; var fileName = node.getSourceFile().fileName;
if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) { if (/^defaultLib:/.test(fileName) || /\.d\.ts$/.test(fileName)) {
setColor(node, 2 /* Black */); setColor(node, 2 /* Black */);
return; return;
} }
const sourceFile = node.getSourceFile(); var sourceFile = node.getSourceFile();
if (!sourceFilesLoaded[sourceFile.fileName]) { if (!sourceFilesLoaded[sourceFile.fileName]) {
sourceFilesLoaded[sourceFile.fileName] = true; sourceFilesLoaded[sourceFile.fileName] = true;
enqueueTopLevelModuleStatements(sourceFile); enqueueTopLevelModuleStatements(sourceFile);
@@ -296,15 +254,12 @@ function markNodes(languageService, options) {
setColor(node, 2 /* Black */); setColor(node, 2 /* Black */);
black_queue.push(node); black_queue.push(node);
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) { if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isMethodDeclaration(node) || ts.isMethodSignature(node) || ts.isPropertySignature(node) || ts.isGetAccessor(node) || ts.isSetAccessor(node))) {
const references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth()); var references = languageService.getReferencesAtPosition(node.getSourceFile().fileName, node.name.pos + node.name.getLeadingTriviaWidth());
if (references) { if (references) {
for (let i = 0, len = references.length; i < len; i++) { for (var i = 0, len = references.length; i < len; i++) {
const reference = references[i]; var reference = references[i];
const referenceSourceFile = program.getSourceFile(reference.fileName); var referenceSourceFile = program.getSourceFile(reference.fileName);
if (!referenceSourceFile) { var referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
continue;
}
const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
if (ts.isMethodDeclaration(referenceNode.parent) if (ts.isMethodDeclaration(referenceNode.parent)
|| ts.isPropertyDeclaration(referenceNode.parent) || ts.isPropertyDeclaration(referenceNode.parent)
|| ts.isGetAccessor(referenceNode.parent) || ts.isGetAccessor(referenceNode.parent)
@@ -316,9 +271,9 @@ function markNodes(languageService, options) {
} }
} }
function enqueueFile(filename) { function enqueueFile(filename) {
const sourceFile = program.getSourceFile(filename); var sourceFile = program.getSourceFile(filename);
if (!sourceFile) { if (!sourceFile) {
console.warn(`Cannot find source file ${filename}`); console.warn("Cannot find source file " + filename);
return; return;
} }
enqueue_black(sourceFile); enqueue_black(sourceFile);
@@ -328,8 +283,8 @@ function markNodes(languageService, options) {
// this import should be ignored // this import should be ignored
return; return;
} }
const nodeSourceFile = node.getSourceFile(); var nodeSourceFile = node.getSourceFile();
let fullPath; var fullPath;
if (/(^\.\/)|(^\.\.\/)/.test(importText)) { if (/(^\.\/)|(^\.\.\/)/.test(importText)) {
fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts'; fullPath = path.join(path.dirname(nodeSourceFile.fileName), importText) + '.ts';
} }
@@ -338,25 +293,25 @@ function markNodes(languageService, options) {
} }
enqueueFile(fullPath); enqueueFile(fullPath);
} }
options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts')); options.entryPoints.forEach(function (moduleId) { return enqueueFile(moduleId + '.ts'); });
// Add fake usage files // Add fake usage files
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint.${index}.ts`)); options.inlineEntryPoints.forEach(function (_, index) { return enqueueFile("inlineEntryPoint:" + index + ".ts"); });
let step = 0; var step = 0;
const checker = program.getTypeChecker(); var checker = program.getTypeChecker();
while (black_queue.length > 0 || gray_queue.length > 0) { var _loop_1 = function () {
++step; ++step;
let node; var node = void 0;
if (step % 100 === 0) { if (step % 100 === 0) {
console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`); console.log(step + "/" + (step + black_queue.length + gray_queue.length) + " (" + black_queue.length + ", " + gray_queue.length + ")");
} }
if (black_queue.length === 0) { if (black_queue.length === 0) {
for (let i = 0; i < gray_queue.length; i++) { for (var i = 0; i < gray_queue.length; i++) {
const node = gray_queue[i]; var node_1 = gray_queue[i];
const nodeParent = node.parent; var nodeParent = node_1.parent;
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) { if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
gray_queue.splice(i, 1); gray_queue.splice(i, 1);
black_queue.push(node); black_queue.push(node_1);
setColor(node, 2 /* Black */); setColor(node_1, 2 /* Black */);
i--; i--;
} }
} }
@@ -365,18 +320,17 @@ function markNodes(languageService, options) {
node = black_queue.shift(); node = black_queue.shift();
} }
else { else {
// only gray nodes remaining... return "break";
break;
} }
const nodeSourceFile = node.getSourceFile(); var nodeSourceFile = node.getSourceFile();
const loop = (node) => { var loop = function (node) {
const [symbol, symbolImportNode] = getRealNodeSymbol(checker, node); var _a = getRealNodeSymbol(checker, node), symbol = _a[0], symbolImportNode = _a[1];
if (symbolImportNode) { if (symbolImportNode) {
setColor(symbolImportNode, 2 /* Black */); setColor(symbolImportNode, 2 /* Black */);
} }
if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) { if (symbol && !nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol)) {
for (let i = 0, len = symbol.declarations.length; i < len; i++) { for (var i = 0, len = symbol.declarations.length; i < len; i++) {
const declaration = symbol.declarations[i]; var declaration = symbol.declarations[i];
if (ts.isSourceFile(declaration)) { if (ts.isSourceFile(declaration)) {
// Do not enqueue full source files // Do not enqueue full source files
// (they can be the declaration of a module import) // (they can be the declaration of a module import)
@@ -384,9 +338,9 @@ function markNodes(languageService, options) {
} }
if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) { if (options.shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
enqueue_black(declaration.name); enqueue_black(declaration.name);
for (let j = 0; j < declaration.members.length; j++) { for (var j = 0; j < declaration.members.length; j++) {
const member = declaration.members[j]; var member = declaration.members[j];
const memberName = member.name ? member.name.getText() : null; var memberName = member.name ? member.name.getText() : null;
if (ts.isConstructorDeclaration(member) if (ts.isConstructorDeclaration(member)
|| ts.isConstructSignatureDeclaration(member) || ts.isConstructSignatureDeclaration(member)
|| ts.isIndexSignatureDeclaration(member) || ts.isIndexSignatureDeclaration(member)
@@ -400,7 +354,8 @@ function markNodes(languageService, options) {
} }
// queue the heritage clauses // queue the heritage clauses
if (declaration.heritageClauses) { if (declaration.heritageClauses) {
for (let heritageClause of declaration.heritageClauses) { for (var _i = 0, _b = declaration.heritageClauses; _i < _b.length; _i++) {
var heritageClause = _b[_i];
enqueue_black(heritageClause); enqueue_black(heritageClause);
} }
} }
@@ -413,12 +368,17 @@ function markNodes(languageService, options) {
node.forEachChild(loop); node.forEachChild(loop);
}; };
node.forEachChild(loop); node.forEachChild(loop);
};
while (black_queue.length > 0 || gray_queue.length > 0) {
var state_1 = _loop_1();
if (state_1 === "break")
break;
} }
} }
function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) { function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
for (let i = 0, len = symbol.declarations.length; i < len; i++) { for (var i = 0, len = symbol.declarations.length; i < len; i++) {
const declaration = symbol.declarations[i]; var declaration = symbol.declarations[i];
const declarationSourceFile = declaration.getSourceFile(); var declarationSourceFile = declaration.getSourceFile();
if (nodeSourceFile === declarationSourceFile) { if (nodeSourceFile === declarationSourceFile) {
if (declaration.pos <= node.pos && node.end <= declaration.end) { if (declaration.pos <= node.pos && node.end <= declaration.end) {
return true; return true;
@@ -428,28 +388,25 @@ function nodeIsInItsOwnDeclaration(nodeSourceFile, node, symbol) {
return false; return false;
} }
function generateResult(languageService, shakeLevel) { function generateResult(languageService, shakeLevel) {
const program = languageService.getProgram(); var program = languageService.getProgram();
if (!program) { var result = {};
throw new Error('Could not get program from language service'); var writeFile = function (filePath, contents) {
}
let result = {};
const writeFile = (filePath, contents) => {
result[filePath] = contents; result[filePath] = contents;
}; };
program.getSourceFiles().forEach((sourceFile) => { program.getSourceFiles().forEach(function (sourceFile) {
const fileName = sourceFile.fileName; var fileName = sourceFile.fileName;
if (/^defaultLib:/.test(fileName)) { if (/^defaultLib:/.test(fileName)) {
return; return;
} }
const destination = fileName; var destination = fileName;
if (/\.d\.ts$/.test(fileName)) { if (/\.d\.ts$/.test(fileName)) {
if (nodeOrChildIsBlack(sourceFile)) { if (nodeOrChildIsBlack(sourceFile)) {
writeFile(destination, sourceFile.text); writeFile(destination, sourceFile.text);
} }
return; return;
} }
let text = sourceFile.text; var text = sourceFile.text;
let result = ''; var result = '';
function keep(node) { function keep(node) {
result += text.substring(node.pos, node.end); result += text.substring(node.pos, node.end);
} }
@@ -478,23 +435,24 @@ function generateResult(languageService, shakeLevel) {
} }
} }
else { else {
let survivingImports = []; var survivingImports = [];
for (const importNode of node.importClause.namedBindings.elements) { for (var i = 0; i < node.importClause.namedBindings.elements.length; i++) {
var importNode = node.importClause.namedBindings.elements[i];
if (getColor(importNode) === 2 /* Black */) { if (getColor(importNode) === 2 /* Black */) {
survivingImports.push(importNode.getFullText(sourceFile)); survivingImports.push(importNode.getFullText(sourceFile));
} }
} }
const leadingTriviaWidth = node.getLeadingTriviaWidth(); var leadingTriviaWidth = node.getLeadingTriviaWidth();
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth); var leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) { if (survivingImports.length > 0) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) { if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`); return write(leadingTrivia + "import " + node.importClause.name.text + ", {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
} }
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`); return write(leadingTrivia + "import {" + survivingImports.join(',') + " } from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
} }
else { else {
if (node.importClause && node.importClause.name && getColor(node.importClause) === 2 /* Black */) { if (node.importClause && getColor(node.importClause) === 2 /* Black */) {
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`); return write(leadingTrivia + "import " + node.importClause.name.text + " from" + node.moduleSpecifier.getFullText(sourceFile) + ";");
} }
} }
} }
@@ -506,10 +464,10 @@ function generateResult(languageService, shakeLevel) {
} }
} }
if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) { if (shakeLevel === 2 /* ClassMembers */ && (ts.isClassDeclaration(node) || ts.isInterfaceDeclaration(node)) && nodeOrChildIsBlack(node)) {
let toWrite = node.getFullText(); var toWrite = node.getFullText();
for (let i = node.members.length - 1; i >= 0; i--) { for (var i = node.members.length - 1; i >= 0; i--) {
const member = node.members[i]; var member = node.members[i];
if (getColor(member) === 2 /* Black */ || !member.name) { if (getColor(member) === 2 /* Black */) {
// keep method // keep method
continue; continue;
} }
@@ -517,8 +475,8 @@ function generateResult(languageService, shakeLevel) {
// TODO: keep all members ending with `Brand`... // TODO: keep all members ending with `Brand`...
continue; continue;
} }
let pos = member.pos - node.pos; var pos = member.pos - node.pos;
let end = member.end - node.pos; var end = member.end - node.pos;
toWrite = toWrite.substring(0, pos) + toWrite.substring(end); toWrite = toWrite.substring(0, pos) + toWrite.substring(end);
} }
return write(toWrite); return write(toWrite);
@@ -550,9 +508,68 @@ function generateResult(languageService, shakeLevel) {
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module) * Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
*/ */
function getRealNodeSymbol(checker, node) { function getRealNodeSymbol(checker, node) {
const getPropertySymbolsFromContextualType = ts.getPropertySymbolsFromContextualType; /**
const getContainingObjectLiteralElement = ts.getContainingObjectLiteralElement; * Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
const getNameFromPropertyName = ts.getNameFromPropertyName; */
/* @internal */
function getContainingObjectLiteralElement(node) {
switch (node.kind) {
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
}
// falls through
case ts.SyntaxKind.Identifier:
return ts.isObjectLiteralElement(node.parent) &&
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
node.parent.name === node ? node.parent : undefined;
}
return undefined;
}
function getPropertySymbolsFromType(type, propName) {
function getTextOfPropertyName(name) {
function isStringOrNumericLiteral(node) {
var kind = node.kind;
return kind === ts.SyntaxKind.StringLiteral
|| kind === ts.SyntaxKind.NumericLiteral;
}
switch (name.kind) {
case ts.SyntaxKind.Identifier:
return name.text;
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
return name.text;
case ts.SyntaxKind.ComputedPropertyName:
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined;
}
}
var name = getTextOfPropertyName(propName);
if (name && type) {
var result = [];
var symbol_1 = type.getProperty(name);
if (type.flags & ts.TypeFlags.Union) {
for (var _i = 0, _a = type.types; _i < _a.length; _i++) {
var t = _a[_i];
var symbol_2 = t.getProperty(name);
if (symbol_2) {
result.push(symbol_2);
}
}
return result;
}
if (symbol_1) {
result.push(symbol_1);
return result;
}
}
return undefined;
}
function getPropertySymbolsFromContextualType(typeChecker, node) {
var objectLiteral = node.parent;
var contextualType = typeChecker.getContextualType(objectLiteral);
return getPropertySymbolsFromType(contextualType, node.name);
}
// Go to the original declaration for cases: // Go to the original declaration for cases:
// //
// (1) when the aliased symbol was declared in the location(parent). // (1) when the aliased symbol was declared in the location(parent).
@@ -580,15 +597,10 @@ function getRealNodeSymbol(checker, node) {
return [null, null]; return [null, null];
} }
} }
const { parent } = node; var symbol = checker.getSymbolAtLocation(node);
let symbol = checker.getSymbolAtLocation(node); var importNode = null;
let importNode = null;
// If this is an alias, and the request came at the declaration location
// get the aliased symbol instead. This allows for goto def on an import e.g.
// import {A, B} from "mod";
// to jump to the implementation directly.
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) { if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
const aliased = checker.getAliasedSymbol(symbol); var aliased = checker.getAliasedSymbol(symbol);
if (aliased.declarations) { if (aliased.declarations) {
// We should mark the import as visited // We should mark the import as visited
importNode = symbol.declarations[0]; importNode = symbol.declarations[0];
@@ -615,22 +627,13 @@ function getRealNodeSymbol(checker, node) {
// pr/*destination*/op1: number // pr/*destination*/op1: number
// } // }
// bar<Test>(({pr/*goto*/op1})=>{}); // bar<Test>(({pr/*goto*/op1})=>{});
if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) && if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
(node === (parent.propertyName || parent.name))) { (node === (node.parent.propertyName || node.parent.name))) {
const name = getNameFromPropertyName(node); var type = checker.getTypeAtLocation(node.parent.parent);
const type = checker.getTypeAtLocation(parent.parent); if (type) {
if (name && type) { var propSymbols = getPropertySymbolsFromType(type, node);
if (type.isUnion()) { if (propSymbols) {
const prop = type.types[0].getProperty(name); symbol = propSymbols[0];
if (prop) {
symbol = prop;
}
}
else {
const prop = type.getProperty(name);
if (prop) {
symbol = prop;
}
} }
} }
} }
@@ -643,14 +646,11 @@ function getRealNodeSymbol(checker, node) {
// } // }
// function Foo(arg: Props) {} // function Foo(arg: Props) {}
// Foo( { pr/*1*/op1: 10, prop2: false }) // Foo( { pr/*1*/op1: 10, prop2: false })
const element = getContainingObjectLiteralElement(node); var element = getContainingObjectLiteralElement(node);
if (element) { if (element && checker.getContextualType(element.parent)) {
const contextualType = element && checker.getContextualType(element.parent); var propertySymbols = getPropertySymbolsFromContextualType(checker, element);
if (contextualType) { if (propertySymbols) {
const propertySymbols = getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false); symbol = propertySymbols[0];
if (propertySymbols) {
symbol = propertySymbols[0];
}
} }
} }
} }
@@ -661,16 +661,17 @@ function getRealNodeSymbol(checker, node) {
} }
/** Get the token whose text contains the position */ /** Get the token whose text contains the position */
function getTokenAtPosition(sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) { function getTokenAtPosition(sourceFile, position, allowPositionInLeadingTrivia, includeEndPosition) {
let current = sourceFile; var current = sourceFile;
outer: while (true) { outer: while (true) {
// find the child that contains 'position' // find the child that contains 'position'
for (const child of current.getChildren()) { for (var _i = 0, _a = current.getChildren(); _i < _a.length; _i++) {
const start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true); var child = _a[_i];
var start = allowPositionInLeadingTrivia ? child.getFullStart() : child.getStart(sourceFile, /*includeJsDoc*/ true);
if (start > position) { if (start > position) {
// If this child begins after position, then all subsequent children will as well. // If this child begins after position, then all subsequent children will as well.
break; break;
} }
const end = child.getEnd(); var end = child.getEnd();
if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) { if (position < end || (position === end && (child.kind === ts.SyntaxKind.EndOfFileToken || includeEndPosition))) {
current = child; current = child;
continue outer; continue outer;

View File

@@ -36,14 +36,10 @@ export interface ITreeShakingOptions {
* e.g. `lib.d.ts`, `lib.es2015.collection.d.ts` * e.g. `lib.d.ts`, `lib.es2015.collection.d.ts`
*/ */
libs: string[]; libs: string[];
/**
* Other .d.ts files
*/
typings: string[];
/** /**
* TypeScript compiler options. * TypeScript compiler options.
*/ */
compilerOptions?: any; compilerOptions: ts.CompilerOptions;
/** /**
* The shake level to perform. * The shake level to perform.
*/ */
@@ -60,42 +56,8 @@ export interface ITreeShakingResult {
[file: string]: string; [file: string]: string;
} }
function printDiagnostics(diagnostics: ReadonlyArray<ts.Diagnostic>): void {
for (const diag of diagnostics) {
let result = '';
if (diag.file) {
result += `${diag.file.fileName}: `;
}
if (diag.file && diag.start) {
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
result += `- ${location.line + 1},${location.character} - `;
}
result += JSON.stringify(diag.messageText);
console.log(result);
}
}
export function shake(options: ITreeShakingOptions): ITreeShakingResult { export function shake(options: ITreeShakingOptions): ITreeShakingResult {
const languageService = createTypeScriptLanguageService(options); const languageService = createTypeScriptLanguageService(options);
const program = languageService.getProgram()!;
const globalDiagnostics = program.getGlobalDiagnostics();
if (globalDiagnostics.length > 0) {
printDiagnostics(globalDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const syntacticDiagnostics = program.getSyntacticDiagnostics();
if (syntacticDiagnostics.length > 0) {
printDiagnostics(syntacticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
const semanticDiagnostics = program.getSemanticDiagnostics();
if (semanticDiagnostics.length > 0) {
printDiagnostics(semanticDiagnostics);
throw new Error(`Compilation Errors encountered.`);
}
markNodes(languageService, options); markNodes(languageService, options);
@@ -109,13 +71,7 @@ function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.Langu
// Add fake usage files // Add fake usage files
options.inlineEntryPoints.forEach((inlineEntryPoint, index) => { options.inlineEntryPoints.forEach((inlineEntryPoint, index) => {
FILES[`inlineEntryPoint.${index}.ts`] = inlineEntryPoint; FILES[`inlineEntryPoint:${index}.ts`] = inlineEntryPoint;
});
// Add additional typings
options.typings.forEach((typing) => {
const filePath = path.join(options.sourcesRoot, typing);
FILES[typing] = fs.readFileSync(filePath).toString();
}); });
// Resolve libs // Resolve libs
@@ -125,9 +81,7 @@ function createTypeScriptLanguageService(options: ITreeShakingOptions): ts.Langu
RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString(); RESOLVED_LIBS[`defaultLib:${filename}`] = fs.readFileSync(filepath).toString();
}); });
const compilerOptions = ts.convertCompilerOptionsFromJson(options.compilerOptions, options.sourcesRoot).options; const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, options.compilerOptions);
const host = new TypeScriptLanguageServiceHost(RESOLVED_LIBS, FILES, compilerOptions);
return ts.createLanguageService(host); return ts.createLanguageService(host);
} }
@@ -151,17 +105,11 @@ function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
options.entryPoints.forEach((entryPoint) => enqueue(entryPoint)); options.entryPoints.forEach((entryPoint) => enqueue(entryPoint));
while (queue.length > 0) { while (queue.length > 0) {
const moduleId = queue.shift()!; const moduleId = queue.shift();
const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts'); const dts_filename = path.join(options.sourcesRoot, moduleId + '.d.ts');
if (fs.existsSync(dts_filename)) { if (fs.existsSync(dts_filename)) {
const dts_filecontents = fs.readFileSync(dts_filename).toString(); const dts_filecontents = fs.readFileSync(dts_filename).toString();
FILES[`${moduleId}.d.ts`] = dts_filecontents; FILES[moduleId + '.d.ts'] = dts_filecontents;
continue;
}
const js_filename = path.join(options.sourcesRoot, moduleId + '.js');
if (fs.existsSync(js_filename)) {
// This is an import for a .js file, so ignore it...
continue; continue;
} }
@@ -188,7 +136,7 @@ function discoverAndReadFiles(options: ITreeShakingOptions): IFileMap {
enqueue(importedModuleId); enqueue(importedModuleId);
} }
FILES[`${moduleId}.ts`] = ts_filecontents; FILES[moduleId + '.ts'] = ts_filecontents;
} }
return FILES; return FILES;
@@ -219,12 +167,12 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
} }
getScriptFileNames(): string[] { getScriptFileNames(): string[] {
return ( return (
([] as string[]) []
.concat(Object.keys(this._libs)) .concat(Object.keys(this._libs))
.concat(Object.keys(this._files)) .concat(Object.keys(this._files))
); );
} }
getScriptVersion(_fileName: string): string { getScriptVersion(fileName: string): string {
return '1'; return '1';
} }
getProjectVersion(): string { getProjectVersion(): string {
@@ -239,13 +187,13 @@ class TypeScriptLanguageServiceHost implements ts.LanguageServiceHost {
return ts.ScriptSnapshot.fromString(''); return ts.ScriptSnapshot.fromString('');
} }
} }
getScriptKind(_fileName: string): ts.ScriptKind { getScriptKind(fileName: string): ts.ScriptKind {
return ts.ScriptKind.TS; return ts.ScriptKind.TS;
} }
getCurrentDirectory(): string { getCurrentDirectory(): string {
return ''; return '';
} }
getDefaultLibFileName(_options: ts.CompilerOptions): string { getDefaultLibFileName(options: ts.CompilerOptions): string {
return 'defaultLib:lib.d.ts'; return 'defaultLib:lib.d.ts';
} }
isDefaultLibFileName(fileName: string): boolean { isDefaultLibFileName(fileName: string): boolean {
@@ -292,9 +240,6 @@ function nodeOrChildIsBlack(node: ts.Node): boolean {
function markNodes(languageService: ts.LanguageService, options: ITreeShakingOptions) { function markNodes(languageService: ts.LanguageService, options: ITreeShakingOptions) {
const program = languageService.getProgram(); const program = languageService.getProgram();
if (!program) {
throw new Error('Could not get program from language service');
}
if (options.shakeLevel === ShakeLevel.Files) { if (options.shakeLevel === ShakeLevel.Files) {
// Mark all source files Black // Mark all source files Black
@@ -321,7 +266,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
} }
if (ts.isExportDeclaration(node)) { if (ts.isExportDeclaration(node)) {
if (node.moduleSpecifier && ts.isStringLiteral(node.moduleSpecifier)) { if (ts.isStringLiteral(node.moduleSpecifier)) {
setColor(node, NodeColor.Black); setColor(node, NodeColor.Black);
enqueueImport(node, node.moduleSpecifier.text); enqueueImport(node, node.moduleSpecifier.text);
} }
@@ -404,11 +349,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
if (references) { if (references) {
for (let i = 0, len = references.length; i < len; i++) { for (let i = 0, len = references.length; i < len; i++) {
const reference = references[i]; const reference = references[i];
const referenceSourceFile = program!.getSourceFile(reference.fileName); const referenceSourceFile = program.getSourceFile(reference.fileName);
if (!referenceSourceFile) {
continue;
}
const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false); const referenceNode = getTokenAtPosition(referenceSourceFile, reference.textSpan.start, false, false);
if ( if (
ts.isMethodDeclaration(referenceNode.parent) ts.isMethodDeclaration(referenceNode.parent)
@@ -424,7 +365,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
} }
function enqueueFile(filename: string): void { function enqueueFile(filename: string): void {
const sourceFile = program!.getSourceFile(filename); const sourceFile = program.getSourceFile(filename);
if (!sourceFile) { if (!sourceFile) {
console.warn(`Cannot find source file ${filename}`); console.warn(`Cannot find source file ${filename}`);
return; return;
@@ -450,7 +391,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts')); options.entryPoints.forEach(moduleId => enqueueFile(moduleId + '.ts'));
// Add fake usage files // Add fake usage files
options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint.${index}.ts`)); options.inlineEntryPoints.forEach((_, index) => enqueueFile(`inlineEntryPoint:${index}.ts`));
let step = 0; let step = 0;
@@ -460,11 +401,11 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
let node: ts.Node; let node: ts.Node;
if (step % 100 === 0) { if (step % 100 === 0) {
console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`); console.log(`${step}/${step+black_queue.length+gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
} }
if (black_queue.length === 0) { if (black_queue.length === 0) {
for (let i = 0; i< gray_queue.length; i++) { for (let i = 0; i < gray_queue.length; i++) {
const node = gray_queue[i]; const node = gray_queue[i];
const nodeParent = node.parent; const nodeParent = node.parent;
if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) { if ((ts.isClassDeclaration(nodeParent) || ts.isInterfaceDeclaration(nodeParent)) && nodeOrChildIsBlack(nodeParent)) {
@@ -477,7 +418,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
} }
if (black_queue.length > 0) { if (black_queue.length > 0) {
node = black_queue.shift()!; node = black_queue.shift();
} else { } else {
// only gray nodes remaining... // only gray nodes remaining...
break; break;
@@ -500,7 +441,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
} }
if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) { if (options.shakeLevel === ShakeLevel.ClassMembers && (ts.isClassDeclaration(declaration) || ts.isInterfaceDeclaration(declaration))) {
enqueue_black(declaration.name!); enqueue_black(declaration.name);
for (let j = 0; j < declaration.members.length; j++) { for (let j = 0; j < declaration.members.length; j++) {
const member = declaration.members[j]; const member = declaration.members[j];
@@ -552,9 +493,6 @@ function nodeIsInItsOwnDeclaration(nodeSourceFile: ts.SourceFile, node: ts.Node,
function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult { function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLevel): ITreeShakingResult {
const program = languageService.getProgram(); const program = languageService.getProgram();
if (!program) {
throw new Error('Could not get program from language service');
}
let result: ITreeShakingResult = {}; let result: ITreeShakingResult = {};
const writeFile = (filePath: string, contents: string): void => { const writeFile = (filePath: string, contents: string): void => {
@@ -609,7 +547,8 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
} }
} else { } else {
let survivingImports: string[] = []; let survivingImports: string[] = [];
for (const importNode of node.importClause.namedBindings.elements) { for (let i = 0; i < node.importClause.namedBindings.elements.length; i++) {
const importNode = node.importClause.namedBindings.elements[i];
if (getColor(importNode) === NodeColor.Black) { if (getColor(importNode) === NodeColor.Black) {
survivingImports.push(importNode.getFullText(sourceFile)); survivingImports.push(importNode.getFullText(sourceFile));
} }
@@ -617,12 +556,12 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
const leadingTriviaWidth = node.getLeadingTriviaWidth(); const leadingTriviaWidth = node.getLeadingTriviaWidth();
const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth); const leadingTrivia = sourceFile.text.substr(node.pos, leadingTriviaWidth);
if (survivingImports.length > 0) { if (survivingImports.length > 0) {
if (node.importClause && node.importClause.name && getColor(node.importClause) === NodeColor.Black) { if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`); return write(`${leadingTrivia}import ${node.importClause.name.text}, {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
} }
return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`); return write(`${leadingTrivia}import {${survivingImports.join(',')} } from${node.moduleSpecifier.getFullText(sourceFile)};`);
} else { } else {
if (node.importClause && node.importClause.name && getColor(node.importClause) === NodeColor.Black) { if (node.importClause && getColor(node.importClause) === NodeColor.Black) {
return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`); return write(`${leadingTrivia}import ${node.importClause.name.text} from${node.moduleSpecifier.getFullText(sourceFile)};`);
} }
} }
@@ -638,7 +577,7 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
let toWrite = node.getFullText(); let toWrite = node.getFullText();
for (let i = node.members.length - 1; i >= 0; i--) { for (let i = node.members.length - 1; i >= 0; i--) {
const member = node.members[i]; const member = node.members[i];
if (getColor(member) === NodeColor.Black || !member.name) { if (getColor(member) === NodeColor.Black) {
// keep method // keep method
continue; continue;
} }
@@ -686,13 +625,74 @@ function generateResult(languageService: ts.LanguageService, shakeLevel: ShakeLe
/** /**
* Returns the node's symbol and the `import` node (if the symbol resolved from a different module) * Returns the node's symbol and the `import` node (if the symbol resolved from a different module)
*/ */
function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol | null, ts.Declaration | null] { function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol, ts.Declaration] {
/**
* Returns the containing object literal property declaration given a possible name node, e.g. "a" in x = { "a": 1 }
*/
/* @internal */
function getContainingObjectLiteralElement(node: ts.Node): ts.ObjectLiteralElement | undefined {
switch (node.kind) {
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
if (node.parent.kind === ts.SyntaxKind.ComputedPropertyName) {
return ts.isObjectLiteralElement(node.parent.parent) ? node.parent.parent : undefined;
}
// falls through
case ts.SyntaxKind.Identifier:
return ts.isObjectLiteralElement(node.parent) &&
(node.parent.parent.kind === ts.SyntaxKind.ObjectLiteralExpression || node.parent.parent.kind === ts.SyntaxKind.JsxAttributes) &&
node.parent.name === node ? node.parent : undefined;
}
return undefined;
}
// Use some TypeScript internals to avoid code duplication function getPropertySymbolsFromType(type: ts.Type, propName: ts.PropertyName) {
type ObjectLiteralElementWithName = ts.ObjectLiteralElement & { name: ts.PropertyName; parent: ts.ObjectLiteralExpression | ts.JsxAttributes }; function getTextOfPropertyName(name: ts.PropertyName): string {
const getPropertySymbolsFromContextualType: (node: ObjectLiteralElementWithName, checker: ts.TypeChecker, contextualType: ts.Type, unionSymbolOk: boolean) => ReadonlyArray<ts.Symbol> = (<any>ts).getPropertySymbolsFromContextualType;
const getContainingObjectLiteralElement: (node: ts.Node) => ObjectLiteralElementWithName | undefined = (<any>ts).getContainingObjectLiteralElement; function isStringOrNumericLiteral(node: ts.Node): node is ts.StringLiteral | ts.NumericLiteral {
const getNameFromPropertyName: (name: ts.PropertyName) => string | undefined = (<any>ts).getNameFromPropertyName; const kind = node.kind;
return kind === ts.SyntaxKind.StringLiteral
|| kind === ts.SyntaxKind.NumericLiteral;
}
switch (name.kind) {
case ts.SyntaxKind.Identifier:
return name.text;
case ts.SyntaxKind.StringLiteral:
case ts.SyntaxKind.NumericLiteral:
return name.text;
case ts.SyntaxKind.ComputedPropertyName:
return isStringOrNumericLiteral(name.expression) ? name.expression.text : undefined!;
}
}
const name = getTextOfPropertyName(propName);
if (name && type) {
const result: ts.Symbol[] = [];
const symbol = type.getProperty(name);
if (type.flags & ts.TypeFlags.Union) {
for (const t of (<ts.UnionType>type).types) {
const symbol = t.getProperty(name);
if (symbol) {
result.push(symbol);
}
}
return result;
}
if (symbol) {
result.push(symbol);
return result;
}
}
return undefined;
}
function getPropertySymbolsFromContextualType(typeChecker: ts.TypeChecker, node: ts.ObjectLiteralElement): ts.Symbol[] {
const objectLiteral = <ts.ObjectLiteralExpression | ts.JsxAttributes>node.parent;
const contextualType = typeChecker.getContextualType(objectLiteral)!;
return getPropertySymbolsFromType(contextualType, node.name!)!;
}
// Go to the original declaration for cases: // Go to the original declaration for cases:
// //
@@ -723,14 +723,8 @@ function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol |
} }
} }
const { parent } = node;
let symbol = checker.getSymbolAtLocation(node); let symbol = checker.getSymbolAtLocation(node);
let importNode: ts.Declaration | null = null; let importNode: ts.Declaration = null;
// If this is an alias, and the request came at the declaration location
// get the aliased symbol instead. This allows for goto def on an import e.g.
// import {A, B} from "mod";
// to jump to the implementation directly.
if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) { if (symbol && symbol.flags & ts.SymbolFlags.Alias && shouldSkipAlias(node, symbol.declarations[0])) {
const aliased = checker.getAliasedSymbol(symbol); const aliased = checker.getAliasedSymbol(symbol);
if (aliased.declarations) { if (aliased.declarations) {
@@ -761,21 +755,13 @@ function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol |
// pr/*destination*/op1: number // pr/*destination*/op1: number
// } // }
// bar<Test>(({pr/*goto*/op1})=>{}); // bar<Test>(({pr/*goto*/op1})=>{});
if (ts.isPropertyName(node) && ts.isBindingElement(parent) && ts.isObjectBindingPattern(parent.parent) && if (ts.isPropertyName(node) && ts.isBindingElement(node.parent) && ts.isObjectBindingPattern(node.parent.parent) &&
(node === (parent.propertyName || parent.name))) { (node === (node.parent.propertyName || node.parent.name))) {
const name = getNameFromPropertyName(node); const type = checker.getTypeAtLocation(node.parent.parent);
const type = checker.getTypeAtLocation(parent.parent); if (type) {
if (name && type) { const propSymbols = getPropertySymbolsFromType(type, node);
if (type.isUnion()) { if (propSymbols) {
const prop = type.types[0].getProperty(name); symbol = propSymbols[0];
if (prop) {
symbol = prop;
}
} else {
const prop = type.getProperty(name);
if (prop) {
symbol = prop;
}
} }
} }
} }
@@ -790,13 +776,10 @@ function getRealNodeSymbol(checker: ts.TypeChecker, node: ts.Node): [ts.Symbol |
// function Foo(arg: Props) {} // function Foo(arg: Props) {}
// Foo( { pr/*1*/op1: 10, prop2: false }) // Foo( { pr/*1*/op1: 10, prop2: false })
const element = getContainingObjectLiteralElement(node); const element = getContainingObjectLiteralElement(node);
if (element) { if (element && checker.getContextualType(element.parent as ts.Expression)) {
const contextualType = element && checker.getContextualType(element.parent); const propertySymbols = getPropertySymbolsFromContextualType(checker, element);
if (contextualType) { if (propertySymbols) {
const propertySymbols = getPropertySymbolsFromContextualType(element, checker, contextualType, /*unionSymbolOk*/ false); symbol = propertySymbols[0];
if (propertySymbols) {
symbol = propertySymbols[0];
}
} }
} }
} }
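
The hunk above reworks how getRealNodeSymbol follows import aliases. For reference, a minimal sketch of that compiler-API pattern — written here for illustration only, not taken from this repository:

import * as ts from 'typescript';

// Resolve a node's symbol, following an import alias (e.g. `import { A } from './mod'`)
// to the declaration it points at — the same ts.TypeChecker calls used in the diff above.
function resolveAliasedSymbol(checker: ts.TypeChecker, node: ts.Node): ts.Symbol | undefined {
	let symbol = checker.getSymbolAtLocation(node);
	if (symbol && (symbol.flags & ts.SymbolFlags.Alias)) {
		symbol = checker.getAliasedSymbol(symbol);
	}
	return symbol;
}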


@@ -3,30 +3,48 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const path_1 = require("path"); var path_1 = require("path");
const Lint = require("tslint"); var Lint = require("tslint");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions())); return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions()));
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class ImportPatterns extends Lint.RuleWalker { var ImportPatterns = /** @class */ (function (_super) {
constructor(file, opts) { __extends(ImportPatterns, _super);
super(file, opts); function ImportPatterns(file, opts) {
this.imports = Object.create(null); var _this = _super.call(this, file, opts) || this;
_this.imports = Object.create(null);
return _this;
} }
visitImportDeclaration(node) { ImportPatterns.prototype.visitImportDeclaration = function (node) {
let path = node.moduleSpecifier.getText(); var path = node.moduleSpecifier.getText();
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path); path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
} }
if (this.imports[path]) { if (this.imports[path]) {
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Duplicate imports for '${path}'.`)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Duplicate imports for '" + path + "'."));
} }
this.imports[path] = true; this.imports[path] = true;
} };
} return ImportPatterns;
}(Lint.RuleWalker));


@@ -3,60 +3,79 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const Lint = require("tslint"); var Lint = require("tslint");
const minimatch = require("minimatch"); var minimatch = require("minimatch");
const path_1 = require("path"); var path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
const configs = this.getOptions().ruleArguments; function Rule() {
for (const config of configs) { return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
var configs = this.getOptions().ruleArguments;
for (var _i = 0, configs_1 = configs; _i < configs_1.length; _i++) {
var config = configs_1[_i];
if (minimatch(sourceFile.fileName, config.target)) { if (minimatch(sourceFile.fileName, config.target)) {
return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config)); return this.applyWithWalker(new ImportPatterns(sourceFile, this.getOptions(), config));
} }
} }
return []; return [];
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class ImportPatterns extends Lint.RuleWalker { var ImportPatterns = /** @class */ (function (_super) {
constructor(file, opts, _config) { __extends(ImportPatterns, _super);
super(file, opts); function ImportPatterns(file, opts, _config) {
this._config = _config; var _this = _super.call(this, file, opts) || this;
_this._config = _config;
return _this;
} }
visitImportEqualsDeclaration(node) { ImportPatterns.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) { if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node); this._validateImport(node.moduleReference.expression.getText(), node);
} }
} };
visitImportDeclaration(node) { ImportPatterns.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node); this._validateImport(node.moduleSpecifier.getText(), node);
} };
visitCallExpression(node) { ImportPatterns.prototype.visitCallExpression = function (node) {
super.visitCallExpression(node); _super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code // import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) { if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments; var path = node.arguments[0];
this._validateImport(path.getText(), node); this._validateImport(path.getText(), node);
} }
} };
_validateImport(path, node) { ImportPatterns.prototype._validateImport = function (path, node) {
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);
// resolve relative paths // resolve relative paths
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(this.getSourceFile().fileName, path); path = path_1.join(this.getSourceFile().fileName, path);
} }
let restrictions; var restrictions;
if (typeof this._config.restrictions === 'string') { if (typeof this._config.restrictions === 'string') {
restrictions = [this._config.restrictions]; restrictions = [this._config.restrictions];
} }
else { else {
restrictions = this._config.restrictions; restrictions = this._config.restrictions;
} }
let matched = false; var matched = false;
for (const pattern of restrictions) { for (var _i = 0, restrictions_1 = restrictions; _i < restrictions_1.length; _i++) {
var pattern = restrictions_1[_i];
if (minimatch(path, pattern)) { if (minimatch(path, pattern)) {
matched = true; matched = true;
break; break;
@@ -64,7 +83,8 @@ class ImportPatterns extends Lint.RuleWalker {
} }
if (!matched) { if (!matched) {
// None of the restrictions matched // None of the restrictions matched
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Imports violates '${restrictions.join(' or ')}' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization`)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Imports violates '" + restrictions.join(' or ') + "' restrictions. See https://github.com/Microsoft/vscode/wiki/Code-Organization"));
} }
} };
} return ImportPatterns;
}(Lint.RuleWalker));


@@ -3,22 +3,36 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const Lint = require("tslint"); var Lint = require("tslint");
const path_1 = require("path"); var path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
const parts = path_1.dirname(sourceFile.fileName).split(/\\|\//); function Rule() {
const ruleArgs = this.getOptions().ruleArguments[0]; return _super !== null && _super.apply(this, arguments) || this;
let config; }
for (let i = parts.length - 1; i >= 0; i--) { Rule.prototype.apply = function (sourceFile) {
var parts = path_1.dirname(sourceFile.fileName).split(/\\|\//);
var ruleArgs = this.getOptions().ruleArguments[0];
var config;
for (var i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) { if (ruleArgs[parts[i]]) {
config = { config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]), allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set() disallowed: new Set()
}; };
Object.keys(ruleArgs).forEach(key => { Object.keys(ruleArgs).forEach(function (key) {
if (!config.allowed.has(key)) { if (!config.allowed.has(key)) {
config.disallowed.add(key); config.disallowed.add(key);
} }
@@ -30,54 +44,58 @@ class Rule extends Lint.Rules.AbstractRule {
return []; return [];
} }
return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions())); return this.applyWithWalker(new LayeringRule(sourceFile, config, this.getOptions()));
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class LayeringRule extends Lint.RuleWalker { var LayeringRule = /** @class */ (function (_super) {
constructor(file, config, opts) { __extends(LayeringRule, _super);
super(file, opts); function LayeringRule(file, config, opts) {
this._config = config; var _this = _super.call(this, file, opts) || this;
_this._config = config;
return _this;
} }
visitImportEqualsDeclaration(node) { LayeringRule.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) { if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node); this._validateImport(node.moduleReference.expression.getText(), node);
} }
} };
visitImportDeclaration(node) { LayeringRule.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node); this._validateImport(node.moduleSpecifier.getText(), node);
} };
visitCallExpression(node) { LayeringRule.prototype.visitCallExpression = function (node) {
super.visitCallExpression(node); _super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code // import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) { if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments; var path = node.arguments[0];
this._validateImport(path.getText(), node); this._validateImport(path.getText(), node);
} }
} };
_validateImport(path, node) { LayeringRule.prototype._validateImport = function (path, node) {
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);
if (path[0] === '.') { if (path[0] === '.') {
path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path); path = path_1.join(path_1.dirname(node.getSourceFile().fileName), path);
} }
const parts = path_1.dirname(path).split(/\\|\//); var parts = path_1.dirname(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) { for (var i = parts.length - 1; i >= 0; i--) {
const part = parts[i]; var part = parts[i];
if (this._config.allowed.has(part)) { if (this._config.allowed.has(part)) {
// GOOD - same layer // GOOD - same layer
return; return;
} }
if (this._config.disallowed.has(part)) { if (this._config.disallowed.has(part)) {
// BAD - wrong layer // BAD - wrong layer
const message = `Bad layering. You are not allowed to access '${part}' from here, allowed layers are: [${LayeringRule._print(this._config.allowed)}]`; var message = "Bad layering. You are not allowed to access '" + part + "' from here, allowed layers are: [" + LayeringRule._print(this._config.allowed) + "]";
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), message));
return; return;
} }
} }
} };
static _print(set) { LayeringRule._print = function (set) {
const r = []; var r = [];
set.forEach(e => r.push(e)); set.forEach(function (e) { return r.push(e); });
return r.join(', '); return r.join(', ');
} };
} return LayeringRule;
}(Lint.RuleWalker));

View File

@@ -16,9 +16,9 @@ export class Rule extends Lint.Rules.AbstractRule {
public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] { public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
const parts = dirname(sourceFile.fileName).split(/\\|\//); const parts = dirname(sourceFile.fileName).split(/\\|\//);
const ruleArgs = this.getOptions().ruleArguments[0]; let ruleArgs = this.getOptions().ruleArguments[0];
let config: Config | undefined; let config: Config;
for (let i = parts.length - 1; i >= 0; i--) { for (let i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) { if (ruleArgs[parts[i]]) {
config = { config = {
@@ -26,8 +26,8 @@ export class Rule extends Lint.Rules.AbstractRule {
disallowed: new Set<string>() disallowed: new Set<string>()
}; };
Object.keys(ruleArgs).forEach(key => { Object.keys(ruleArgs).forEach(key => {
if (!config!.allowed.has(key)) { if (!config.allowed.has(key)) {
config!.disallowed.add(key); config.disallowed.add(key);
} }
}); });
break; break;
@@ -98,7 +98,7 @@ class LayeringRule extends Lint.RuleWalker {
} }
static _print(set: Set<string>): string { static _print(set: Set<string>): string {
const r: string[] = []; let r: string[] = [];
set.forEach(e => r.push(e)); set.forEach(e => r.push(e));
return r.join(', '); return r.join(', ');
} }


@@ -1,22 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile) {
return this.applyWithWalker(new NewBufferRuleWalker(sourceFile, this.getOptions()));
}
}
exports.Rule = Rule;
class NewBufferRuleWalker extends Lint.RuleWalker {
visitNewExpression(node) {
if (node.expression.kind === ts.SyntaxKind.Identifier && node.expression && node.expression.text === 'Buffer') {
this.addFailureAtNode(node, '`new Buffer` is deprecated. Consider Buffer.From or Buffer.alloc instead.');
}
super.visitNewExpression(node);
}
}


@@ -1,23 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as ts from 'typescript';
import * as Lint from 'tslint';
export class Rule extends Lint.Rules.AbstractRule {
apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
return this.applyWithWalker(new NewBufferRuleWalker(sourceFile, this.getOptions()));
}
}
class NewBufferRuleWalker extends Lint.RuleWalker {
visitNewExpression(node: ts.NewExpression) {
if (node.expression.kind === ts.SyntaxKind.Identifier && node.expression && (node.expression as ts.Identifier).text === 'Buffer') {
this.addFailureAtNode(node, '`new Buffer` is deprecated. Consider Buffer.From or Buffer.alloc instead.');
}
super.visitNewExpression(node);
}
}


@@ -3,42 +3,57 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const Lint = require("tslint"); var Lint = require("tslint");
const path_1 = require("path"); var path_1 = require("path");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
if (/vs(\/|\\)editor/.test(sourceFile.fileName)) { if (/vs(\/|\\)editor/.test(sourceFile.fileName)) {
// the vs/editor folder is allowed to use the standalone editor // the vs/editor folder is allowed to use the standalone editor
return []; return [];
} }
return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions())); return this.applyWithWalker(new NoStandaloneEditorRuleWalker(sourceFile, this.getOptions()));
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class NoStandaloneEditorRuleWalker extends Lint.RuleWalker { var NoStandaloneEditorRuleWalker = /** @class */ (function (_super) {
constructor(file, opts) { __extends(NoStandaloneEditorRuleWalker, _super);
super(file, opts); function NoStandaloneEditorRuleWalker(file, opts) {
return _super.call(this, file, opts) || this;
} }
visitImportEqualsDeclaration(node) { NoStandaloneEditorRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) { if (node.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference) {
this._validateImport(node.moduleReference.expression.getText(), node); this._validateImport(node.moduleReference.expression.getText(), node);
} }
} };
visitImportDeclaration(node) { NoStandaloneEditorRuleWalker.prototype.visitImportDeclaration = function (node) {
this._validateImport(node.moduleSpecifier.getText(), node); this._validateImport(node.moduleSpecifier.getText(), node);
} };
visitCallExpression(node) { NoStandaloneEditorRuleWalker.prototype.visitCallExpression = function (node) {
super.visitCallExpression(node); _super.prototype.visitCallExpression.call(this, node);
// import('foo') statements inside the code // import('foo') statements inside the code
if (node.expression.kind === ts.SyntaxKind.ImportKeyword) { if (node.expression.kind === ts.SyntaxKind.ImportKeyword) {
const [path] = node.arguments; var path = node.arguments[0];
this._validateImport(path.getText(), node); this._validateImport(path.getText(), node);
} }
} };
// {{SQL CARBON EDIT}} - Rename node argument to _node to prevent errors since it is not used NoStandaloneEditorRuleWalker.prototype._validateImport = function (path, node) {
_validateImport(path, _node) {
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);
// resolve relative paths // resolve relative paths
@@ -53,5 +68,6 @@ class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
// {{SQL CARBON EDIT}} // {{SQL CARBON EDIT}}
//this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`)); //this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Not allowed to import standalone editor modules. See https://github.com/Microsoft/vscode/wiki/Code-Organization`));
} }
} };
} return NoStandaloneEditorRuleWalker;
}(Lint.RuleWalker));


@@ -43,8 +43,7 @@ class NoStandaloneEditorRuleWalker extends Lint.RuleWalker {
} }
} }
// {{SQL CARBON EDIT}} - Rename node argument to _node to prevent errors since it is not used private _validateImport(path: string, node: ts.Node): void {
private _validateImport(path: string, _node: ts.Node): void {
// remove quotes // remove quotes
path = path.slice(1, -1); path = path.slice(1, -1);


@@ -3,17 +3,32 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript"); var ts = require("typescript");
const Lint = require("tslint"); var Lint = require("tslint");
/** /**
* Implementation of the no-unexternalized-strings rule. * Implementation of the no-unexternalized-strings rule.
*/ */
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions())); function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
} }
} Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new NoUnexternalizedStringsRuleWalker(sourceFile, this.getOptions()));
};
return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
function isStringLiteral(node) { function isStringLiteral(node) {
return node && node.kind === ts.SyntaxKind.StringLiteral; return node && node.kind === ts.SyntaxKind.StringLiteral;
@@ -24,76 +39,73 @@ function isObjectLiteral(node) {
function isPropertyAssignment(node) { function isPropertyAssignment(node) {
return node && node.kind === ts.SyntaxKind.PropertyAssignment; return node && node.kind === ts.SyntaxKind.PropertyAssignment;
} }
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker { var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
constructor(file, opts) { __extends(NoUnexternalizedStringsRuleWalker, _super);
super(file, opts); function NoUnexternalizedStringsRuleWalker(file, opts) {
this.signatures = Object.create(null); var _this = _super.call(this, file, opts) || this;
this.ignores = Object.create(null); _this.signatures = Object.create(null);
this.messageIndex = undefined; _this.ignores = Object.create(null);
this.keyIndex = undefined; _this.messageIndex = undefined;
this.usedKeys = Object.create(null); _this.keyIndex = undefined;
const options = this.getOptions(); _this.usedKeys = Object.create(null);
const first = options && options.length > 0 ? options[0] : null; var options = _this.getOptions();
var first = options && options.length > 0 ? options[0] : null;
if (first) { if (first) {
if (Array.isArray(first.signatures)) { if (Array.isArray(first.signatures)) {
first.signatures.forEach((signature) => this.signatures[signature] = true); first.signatures.forEach(function (signature) { return _this.signatures[signature] = true; });
} }
if (Array.isArray(first.ignores)) { if (Array.isArray(first.ignores)) {
first.ignores.forEach((ignore) => this.ignores[ignore] = true); first.ignores.forEach(function (ignore) { return _this.ignores[ignore] = true; });
} }
if (typeof first.messageIndex !== 'undefined') { if (typeof first.messageIndex !== 'undefined') {
this.messageIndex = first.messageIndex; _this.messageIndex = first.messageIndex;
} }
if (typeof first.keyIndex !== 'undefined') { if (typeof first.keyIndex !== 'undefined') {
this.keyIndex = first.keyIndex; _this.keyIndex = first.keyIndex;
} }
} }
return _this;
} }
visitSourceFile(node) { NoUnexternalizedStringsRuleWalker.prototype.visitSourceFile = function (node) {
super.visitSourceFile(node); var _this = this;
Object.keys(this.usedKeys).forEach(key => { _super.prototype.visitSourceFile.call(this, node);
// Keys are quoted. Object.keys(this.usedKeys).forEach(function (key) {
let identifier = key.substr(1, key.length - 2); var occurrences = _this.usedKeys[key];
if (!NoUnexternalizedStringsRuleWalker.IDENTIFIER.test(identifier)) {
let occurrence = this.usedKeys[key][0];
this.addFailure(this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `The key ${occurrence.key.getText()} doesn't conform to a valid localize identifier`));
}
const occurrences = this.usedKeys[key];
if (occurrences.length > 1) { if (occurrences.length > 1) {
occurrences.forEach(occurrence => { occurrences.forEach(function (occurrence) {
this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`))); _this.addFailure((_this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), "Duplicate key " + occurrence.key.getText() + " with different message value.")));
}); });
} }
}); });
} };
visitStringLiteral(node) { NoUnexternalizedStringsRuleWalker.prototype.visitStringLiteral = function (node) {
this.checkStringLiteral(node); this.checkStringLiteral(node);
super.visitStringLiteral(node); _super.prototype.visitStringLiteral.call(this, node);
} };
checkStringLiteral(node) { NoUnexternalizedStringsRuleWalker.prototype.checkStringLiteral = function (node) {
const text = node.getText(); var text = node.getText();
const doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE; var doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
const info = this.findDescribingParent(node); var info = this.findDescribingParent(node);
// Ignore strings in import and export nodes. // Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) { if (info && info.isImport && doubleQuoted) {
const fix = [ var fix = [
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''), Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''), Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
]; ];
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix); this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix);
return; return;
} }
const callInfo = info ? info.callInfo : null; var callInfo = info ? info.callInfo : null;
const functionName = callInfo ? callInfo.callExpression.expression.getText() : null; var functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) { if (functionName && this.ignores[functionName]) {
return; return;
} }
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) { if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText(); var s = node.getText();
const fix = [ var fix = [
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`), Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")"),
]; ];
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix)); this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
return; return;
} }
// We have a single quoted string outside a localize function name. // We have a single quoted string outside a localize function name.
@@ -101,21 +113,22 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
return; return;
} }
// We have a string that is a direct argument into the localize call. // We have a string that is a direct argument into the localize call.
const keyArg = callInfo && callInfo.argIndex === this.keyIndex var keyArg = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex] ? callInfo.callExpression.arguments[this.keyIndex]
: null; : null;
if (keyArg) { if (keyArg) {
if (isStringLiteral(keyArg)) { if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined); this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} }
else if (isObjectLiteral(keyArg)) { else if (isObjectLiteral(keyArg)) {
for (const property of keyArg.properties) { for (var i = 0; i < keyArg.properties.length; i++) {
var property = keyArg.properties[i];
if (isPropertyAssignment(property)) { if (isPropertyAssignment(property)) {
const name = property.name.getText(); var name_1 = property.name.getText();
if (name === 'key') { if (name_1 === 'key') {
const initializer = property.initializer; var initializer = property.initializer;
if (isStringLiteral(initializer)) { if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined); this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} }
break; break;
} }
@@ -123,42 +136,42 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
} }
} }
const messageArg = callInfo.callExpression.arguments[this.messageIndex]; var messageArg = callInfo.callExpression.arguments[this.messageIndex];
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) { if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), `Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`)); this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
return; return;
} }
} };
recordKey(keyNode, messageNode) { NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
const text = keyNode.getText(); var text = keyNode.getText();
// We have an empty key // We have an empty key
if (text.match(/(['"]) *\1/)) { if (text.match(/(['"]) *\1/)) {
if (messageNode) { if (messageNode) {
this.addFailureAtNode(keyNode, `Key is empty for message: ${messageNode.getText()}`); this.addFailureAtNode(keyNode, "Key is empty for message: " + messageNode.getText());
} }
else { else {
this.addFailureAtNode(keyNode, `Key is empty.`); this.addFailureAtNode(keyNode, "Key is empty.");
} }
return; return;
} }
let occurrences = this.usedKeys[text]; var occurrences = this.usedKeys[text];
if (!occurrences) { if (!occurrences) {
occurrences = []; occurrences = [];
this.usedKeys[text] = occurrences; this.usedKeys[text] = occurrences;
} }
if (messageNode) { if (messageNode) {
if (occurrences.some(pair => pair.message ? pair.message.getText() === messageNode.getText() : false)) { if (occurrences.some(function (pair) { return pair.message ? pair.message.getText() === messageNode.getText() : false; })) {
return; return;
} }
} }
occurrences.push({ key: keyNode, message: messageNode }); occurrences.push({ key: keyNode, message: messageNode });
} };
findDescribingParent(node) { NoUnexternalizedStringsRuleWalker.prototype.findDescribingParent = function (node) {
let parent; var parent;
while ((parent = node.parent)) { while ((parent = node.parent)) {
const kind = parent.kind; var kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) { if (kind === ts.SyntaxKind.CallExpression) {
const callExpression = parent; var callExpression = parent;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } }; return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(node) } };
} }
else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) { else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
@@ -172,9 +185,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
node = parent; node = parent;
} }
return null; };
} NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
} NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.'; return NoUnexternalizedStringsRuleWalker;
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"'; }(Lint.RuleWalker));
NoUnexternalizedStringsRuleWalker.IDENTIFIER = /^[_a-zA-Z0-9][ .\-_a-zA-Z0-9]*$/;


@@ -40,7 +40,7 @@ function isPropertyAssignment(node: ts.Node): node is ts.PropertyAssignment {
interface KeyMessagePair { interface KeyMessagePair {
key: ts.StringLiteral; key: ts.StringLiteral;
message: ts.Node | undefined; message: ts.Node;
} }
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker { class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
@@ -50,8 +50,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
private static DOUBLE_QUOTE: string = '"'; private static DOUBLE_QUOTE: string = '"';
private signatures: Map<boolean>; private signatures: Map<boolean>;
private messageIndex: number | undefined; private messageIndex: number;
private keyIndex: number | undefined; private keyIndex: number;
private ignores: Map<boolean>; private ignores: Map<boolean>;
private usedKeys: Map<KeyMessagePair[]>; private usedKeys: Map<KeyMessagePair[]>;
@@ -63,8 +63,8 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
this.messageIndex = undefined; this.messageIndex = undefined;
this.keyIndex = undefined; this.keyIndex = undefined;
this.usedKeys = Object.create(null); this.usedKeys = Object.create(null);
const options: any[] = this.getOptions(); let options: any[] = this.getOptions();
const first: UnexternalizedStringsOptions = options && options.length > 0 ? options[0] : null; let first: UnexternalizedStringsOptions = options && options.length > 0 ? options[0] : null;
if (first) { if (first) {
if (Array.isArray(first.signatures)) { if (Array.isArray(first.signatures)) {
first.signatures.forEach((signature: string) => this.signatures[signature] = true); first.signatures.forEach((signature: string) => this.signatures[signature] = true);
@@ -81,17 +81,10 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
} }
private static IDENTIFIER = /^[_a-zA-Z0-9][ .\-_a-zA-Z0-9]*$/;
protected visitSourceFile(node: ts.SourceFile): void { protected visitSourceFile(node: ts.SourceFile): void {
super.visitSourceFile(node); super.visitSourceFile(node);
Object.keys(this.usedKeys).forEach(key => { Object.keys(this.usedKeys).forEach(key => {
// Keys are quoted. let occurrences = this.usedKeys[key];
let identifier = key.substr(1, key.length - 2);
if (!NoUnexternalizedStringsRuleWalker.IDENTIFIER.test(identifier)) {
let occurrence = this.usedKeys[key][0];
this.addFailure(this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `The key ${occurrence.key.getText()} doesn't conform to a valid localize identifier`));
}
const occurrences = this.usedKeys[key];
if (occurrences.length > 1) { if (occurrences.length > 1) {
occurrences.forEach(occurrence => { occurrences.forEach(occurrence => {
this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`))); this.addFailure((this.createFailure(occurrence.key.getStart(), occurrence.key.getWidth(), `Duplicate key ${occurrence.key.getText()} with different message value.`)));
@@ -106,9 +99,9 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
private checkStringLiteral(node: ts.StringLiteral): void { private checkStringLiteral(node: ts.StringLiteral): void {
const text = node.getText(); let text = node.getText();
const doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE; let doubleQuoted = text.length >= 2 && text[0] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE && text[text.length - 1] === NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE;
const info = this.findDescribingParent(node); let info = this.findDescribingParent(node);
// Ignore strings in import and export nodes. // Ignore strings in import and export nodes.
if (info && info.isImport && doubleQuoted) { if (info && info.isImport && doubleQuoted) {
const fix = [ const fix = [
@@ -122,13 +115,13 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
); );
return; return;
} }
const callInfo = info ? info.callInfo : null; let callInfo = info ? info.callInfo : null;
const functionName = callInfo ? callInfo.callExpression.expression.getText() : null; let functionName = callInfo ? callInfo.callExpression.expression.getText() : null;
if (functionName && this.ignores[functionName]) { if (functionName && this.ignores[functionName]) {
return; return;
} }
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName!])) { if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
const s = node.getText(); const s = node.getText();
const fix = [ const fix = [
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`), Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`),
@@ -137,24 +130,25 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
return; return;
} }
// We have a single quoted string outside a localize function name. // We have a single quoted string outside a localize function name.
if (!doubleQuoted && !this.signatures[functionName!]) { if (!doubleQuoted && !this.signatures[functionName]) {
return; return;
} }
// We have a string that is a direct argument into the localize call. // We have a string that is a direct argument into the localize call.
const keyArg: ts.Expression | null = callInfo && callInfo.argIndex === this.keyIndex let keyArg: ts.Expression = callInfo.argIndex === this.keyIndex
? callInfo.callExpression.arguments[this.keyIndex] ? callInfo.callExpression.arguments[this.keyIndex]
: null; : null;
if (keyArg) { if (keyArg) {
if (isStringLiteral(keyArg)) { if (isStringLiteral(keyArg)) {
this.recordKey(keyArg, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined); this.recordKey(keyArg, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} else if (isObjectLiteral(keyArg)) { } else if (isObjectLiteral(keyArg)) {
for (const property of keyArg.properties) { for (let i = 0; i < keyArg.properties.length; i++) {
let property = keyArg.properties[i];
if (isPropertyAssignment(property)) { if (isPropertyAssignment(property)) {
const name = property.name.getText(); let name = property.name.getText();
if (name === 'key') { if (name === 'key') {
const initializer = property.initializer; let initializer = property.initializer;
if (isStringLiteral(initializer)) { if (isStringLiteral(initializer)) {
this.recordKey(initializer, this.messageIndex && callInfo ? callInfo.callExpression.arguments[this.messageIndex] : undefined); this.recordKey(initializer, this.messageIndex ? callInfo.callExpression.arguments[this.messageIndex] : undefined);
} }
break; break;
} }
@@ -163,18 +157,18 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
} }
const messageArg = callInfo!.callExpression.arguments[this.messageIndex!]; const messageArg = callInfo.callExpression.arguments[this.messageIndex];
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) { if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
this.addFailure(this.createFailure( this.addFailure(this.createFailure(
messageArg.getStart(), messageArg.getWidth(), messageArg.getStart(), messageArg.getWidth(),
`Message argument to '${callInfo!.callExpression.expression.getText()}' must be a string literal.`)); `Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`));
return; return;
} }
} }
private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node | undefined) { private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) {
const text = keyNode.getText(); let text = keyNode.getText();
// We have an empty key // We have an empty key
if (text.match(/(['"]) *\1/)) { if (text.match(/(['"]) *\1/)) {
if (messageNode) { if (messageNode) {
@@ -197,12 +191,12 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
occurrences.push({ key: keyNode, message: messageNode }); occurrences.push({ key: keyNode, message: messageNode });
} }
private findDescribingParent(node: ts.Node): { callInfo?: { callExpression: ts.CallExpression, argIndex: number }, isImport?: boolean; } | null { private findDescribingParent(node: ts.Node): { callInfo?: { callExpression: ts.CallExpression, argIndex: number }, isImport?: boolean; } {
let parent: ts.Node; let parent: ts.Node;
while ((parent = node.parent)) { while ((parent = node.parent)) {
const kind = parent.kind; let kind = parent.kind;
if (kind === ts.SyntaxKind.CallExpression) { if (kind === ts.SyntaxKind.CallExpression) {
const callExpression = parent as ts.CallExpression; let callExpression = parent as ts.CallExpression;
return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(<any>node) } }; return { callInfo: { callExpression: callExpression, argIndex: callExpression.arguments.indexOf(<any>node) } };
} else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) { } else if (kind === ts.SyntaxKind.ImportEqualsDeclaration || kind === ts.SyntaxKind.ImportDeclaration || kind === ts.SyntaxKind.ExportDeclaration) {
return { isImport: true }; return { isImport: true };
@@ -214,6 +208,5 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
} }
node = parent; node = parent;
} }
return null;
} }
} }


@@ -3,43 +3,59 @@
* Copyright (c) Microsoft Corporation. All rights reserved. * Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information. * Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/ *--------------------------------------------------------------------------------------------*/
var __extends = (this && this.__extends) || (function () {
var extendStatics = Object.setPrototypeOf ||
({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
return function (d, b) {
extendStatics(d, b);
function __() { this.constructor = d; }
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
};
})();
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
const Lint = require("tslint"); var Lint = require("tslint");
const fs = require("fs"); var fs = require("fs");
class Rule extends Lint.Rules.AbstractRule { var Rule = /** @class */ (function (_super) {
apply(sourceFile) { __extends(Rule, _super);
function Rule() {
return _super !== null && _super.apply(this, arguments) || this;
}
Rule.prototype.apply = function (sourceFile) {
return this.applyWithWalker(new TranslationRemindRuleWalker(sourceFile, this.getOptions())); return this.applyWithWalker(new TranslationRemindRuleWalker(sourceFile, this.getOptions()));
} };
} return Rule;
}(Lint.Rules.AbstractRule));
exports.Rule = Rule; exports.Rule = Rule;
class TranslationRemindRuleWalker extends Lint.RuleWalker { var TranslationRemindRuleWalker = /** @class */ (function (_super) {
constructor(file, opts) { __extends(TranslationRemindRuleWalker, _super);
super(file, opts); function TranslationRemindRuleWalker(file, opts) {
return _super.call(this, file, opts) || this;
} }
visitImportDeclaration(node) { TranslationRemindRuleWalker.prototype.visitImportDeclaration = function (node) {
const declaration = node.moduleSpecifier.getText(); var declaration = node.moduleSpecifier.getText();
if (declaration !== `'${TranslationRemindRuleWalker.NLS_MODULE}'`) { if (declaration !== "'" + TranslationRemindRuleWalker.NLS_MODULE + "'") {
return; return;
} }
this.visitImportLikeDeclaration(node); this.visitImportLikeDeclaration(node);
} };
visitImportEqualsDeclaration(node) { TranslationRemindRuleWalker.prototype.visitImportEqualsDeclaration = function (node) {
const reference = node.moduleReference.getText(); var reference = node.moduleReference.getText();
if (reference !== `require('${TranslationRemindRuleWalker.NLS_MODULE}')`) { if (reference !== "require('" + TranslationRemindRuleWalker.NLS_MODULE + "')") {
return; return;
} }
this.visitImportLikeDeclaration(node); this.visitImportLikeDeclaration(node);
} };
visitImportLikeDeclaration(node) { TranslationRemindRuleWalker.prototype.visitImportLikeDeclaration = function (node) {
const currentFile = node.getSourceFile().fileName; var currentFile = node.getSourceFile().fileName;
const matchService = currentFile.match(/vs\/workbench\/services\/\w+/); var matchService = currentFile.match(/vs\/workbench\/services\/\w+/);
const matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/); var matchPart = currentFile.match(/vs\/workbench\/parts\/\w+/);
if (!matchService && !matchPart) { if (!matchService && !matchPart) {
return; return;
} }
const resource = matchService ? matchService[0] : matchPart[0]; var resource = matchService ? matchService[0] : matchPart[0];
let resourceDefined = false; var resourceDefined = false;
let json; var json;
try { try {
json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'); json = fs.readFileSync('./build/lib/i18n.resources.json', 'utf8');
} }
@@ -47,16 +63,17 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.'); console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');
return; return;
} }
const workbenchResources = JSON.parse(json).workbench; var workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach((existingResource) => { workbenchResources.forEach(function (existingResource) {
if (existingResource.name === resource) { if (existingResource.name === resource) {
resourceDefined = true; resourceDefined = true;
return; return;
} }
}); });
if (!resourceDefined) { if (!resourceDefined) {
this.addFailureAtNode(node, `Please add '${resource}' to ./build/lib/i18n.resources.json file to use translations here.`); this.addFailureAtNode(node, "Please add '" + resource + "' to ./build/lib/i18n.resources.json file to use translations here.");
} }
} };
} TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls';
TranslationRemindRuleWalker.NLS_MODULE = 'vs/nls'; return TranslationRemindRuleWalker;
}(Lint.RuleWalker));


@@ -30,7 +30,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
this.visitImportLikeDeclaration(node); this.visitImportLikeDeclaration(node);
} }
protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void { protected visitImportEqualsDeclaration(node: ts.ImportEqualsDeclaration): void {
const reference = node.moduleReference.getText(); const reference = node.moduleReference.getText();
if (reference !== `require('${TranslationRemindRuleWalker.NLS_MODULE}')`) { if (reference !== `require('${TranslationRemindRuleWalker.NLS_MODULE}')`) {
return; return;
@@ -47,7 +47,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
return; return;
} }
const resource = matchService ? matchService[0] : matchPart![0]; const resource = matchService ? matchService[0] : matchPart[0];
let resourceDefined = false; let resourceDefined = false;
let json; let json;
@@ -59,7 +59,7 @@ class TranslationRemindRuleWalker extends Lint.RuleWalker {
} }
const workbenchResources = JSON.parse(json).workbench; const workbenchResources = JSON.parse(json).workbench;
workbenchResources.forEach((existingResource: any) => { workbenchResources.forEach(existingResource => {
if (existingResource.name === resource) { if (existingResource.name === resource) {
resourceDefined = true; resourceDefined = true;
return; return;


@@ -0,0 +1,10 @@
// ATTENTION - THIS DIRECTORY CONTAINS THIRD PARTY OPEN SOURCE MATERIALS:
// All OSS in this folder is development time only
[{
"name": "definitelytyped",
"repositoryURL": "https://github.com/DefinitelyTyped/DefinitelyTyped",
"license": "MIT",
"isDev": true
}
]

build/lib/typings/Q.d.ts (vendored, new file, 361 lines)

@@ -0,0 +1,361 @@
// Type definitions for Q
// Project: https://github.com/kriskowal/q
// Definitions by: Barrie Nemetchek <https://github.com/bnemetchek>, Andrew Gaspar <https://github.com/AndrewGaspar/>, John Reilly <https://github.com/johnnyreilly>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
/**
* If value is a Q promise, returns the promise.
* If value is a promise from another library it is coerced into a Q promise (where possible).
*/
declare function Q<T>(promise: Q.IPromise<T>): Q.Promise<T>;
/**
* If value is not a promise, returns a promise that is fulfilled with value.
*/
declare function Q<T>(value: T): Q.Promise<T>;
/**
* Calling with nothing at all creates a void promise
*/
declare function Q(): Q.Promise<void>;
declare namespace Q {
type IWhenable<T> = IPromise<T> | T;
interface IPromise<T> {
then<U>(onFulfill?: (value: T) => IWhenable<U>, onReject?: (error: any) => IWhenable<U>): IPromise<U>;
}
interface Deferred<T> {
promise: Promise<T>;
resolve(value?: IWhenable<T>): void;
reject(reason: any): void;
notify(value: any): void;
makeNodeResolver(): (reason: any, value: T) => void;
}
interface Promise<T> {
/**
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.
* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
*/
fin(finallyCallback: () => any): Promise<T>;
/**
* Like a finally clause, allows you to observe either the fulfillment or rejection of a promise, but to do so without modifying the final value. This is useful for collecting resources regardless of whether a job succeeded, like closing a database connection, shutting a server down, or deleting an unneeded key from an object.
* finally returns a promise, which will become resolved with the same fulfillment value or rejection reason as promise. However, if callback returns a promise, the resolution of the returned promise will be delayed until the promise returned from callback is finished.
*/
finally(finallyCallback: () => any): Promise<T>;
/**
* The then method from the Promises/A+ specification, with an additional progress handler.
*/
then<U>(onFulfill?: (value: T) => IWhenable<U>, onReject?: (error: any) => IWhenable<U>, onProgress?: Function): Promise<U>;
/**
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
*
* This is especially useful in conjunction with all
*/
spread<U>(onFulfill: (...args: any[]) => IWhenable<U>, onReject?: (reason: any) => IWhenable<U>): Promise<U>;
fail<U>(onRejected: (reason: any) => IWhenable<U>): Promise<U>;
/**
* A sugar method, equivalent to promise.then(undefined, onRejected).
*/
catch<U>(onRejected: (reason: any) => IWhenable<U>): Promise<U>;
/**
* A sugar method, equivalent to promise.then(undefined, undefined, onProgress).
*/
progress(onProgress: (progress: any) => any): Promise<T>;
/**
* Much like then, but with different behavior around unhandled rejection. If there is an unhandled rejection, either because promise is rejected and no onRejected callback was provided, or because onFulfilled or onRejected threw an error or returned a rejected promise, the resulting rejection reason is thrown as an exception in a future turn of the event loop.
*
* This method should be used to terminate chains of promises that will not be passed elsewhere. Since exceptions thrown in then callbacks are consumed and transformed into rejections, exceptions at the end of the chain are easy to accidentally, silently ignore. By arranging for the exception to be thrown in a future turn of the event loop, so that it won't be caught, it causes an onerror event on the browser window, or an uncaughtException event on Node.js's process object.
*
* Exceptions thrown by done will have long stack traces, if Q.longStackSupport is set to true. If Q.onerror is set, exceptions will be delivered there instead of thrown in a future turn.
*
* The Golden Rule of done vs. then usage is: either return your promise to someone else, or if the chain ends with you, call done to terminate it.
*/
done(onFulfilled?: (value: T) => any, onRejected?: (reason: any) => any, onProgress?: (progress: any) => any): void;
/**
* If callback is a function, assumes it's a Node.js-style callback, and calls it as either callback(rejectionReason) when/if promise becomes rejected, or as callback(null, fulfillmentValue) when/if promise becomes fulfilled. If callback is not a function, simply returns promise.
*/
nodeify(callback: (reason: any, value: any) => void): Promise<T>;
/**
* Returns a promise to get the named property of an object. Essentially equivalent to
*
* promise.then(function (o) {
* return o[propertyName];
* });
*/
get<U>(propertyName: String): Promise<U>;
set<U>(propertyName: String, value: any): Promise<U>;
delete<U>(propertyName: String): Promise<U>;
/**
* Returns a promise for the result of calling the named method of an object with the given array of arguments. The object itself is this in the function, just like a synchronous method call. Essentially equivalent to
*
* promise.then(function (o) {
* return o[methodName].apply(o, args);
* });
*/
post<U>(methodName: String, args: any[]): Promise<U>;
/**
* Returns a promise for the result of calling the named method of an object with the given variadic arguments. The object itself is this in the function, just like a synchronous method call.
*/
invoke<U>(methodName: String, ...args: any[]): Promise<U>;
fapply<U>(args: any[]): Promise<U>;
fcall<U>(...args: any[]): Promise<U>;
/**
* Returns a promise for an array of the property names of an object. Essentially equivalent to
*
* promise.then(function (o) {
* return Object.keys(o);
* });
*/
keys(): Promise<string[]>;
/**
* A sugar method, equivalent to promise.then(function () { return value; }).
*/
thenResolve<U>(value: U): Promise<U>;
/**
* A sugar method, equivalent to promise.then(function () { throw reason; }).
*/
thenReject(reason: any): Promise<T>;
/**
* Attaches a handler that will observe the value of the promise when it becomes fulfilled, returning a promise for that same value, perhaps deferred but not replaced by the promise returned by the onFulfilled handler.
*/
tap(onFulfilled: (value: T) => any): Promise<T>;
timeout(ms: number, message?: string): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
delay(ms: number): Promise<T>;
/**
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
*/
isFulfilled(): boolean;
/**
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
*/
isRejected(): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
isPending(): boolean;
valueOf(): any;
/**
* Returns a "state snapshot" object, which will be in one of three forms:
*
* - { state: "pending" }
* - { state: "fulfilled", value: <fulfllment value> }
* - { state: "rejected", reason: <rejection reason> }
*/
inspect(): PromiseState<T>;
}
interface PromiseState<T> {
/**
* "fulfilled", "rejected", "pending"
*/
state: string;
value?: T;
reason?: any;
}
// If no value provided, returned promise will be of void type
export function when(): Promise<void>;
// if no fulfill, reject, or progress provided, returned promise will be of same type
export function when<T>(value: IWhenable<T>): Promise<T>;
// If a non-promise value is provided, it will not reject or progress
export function when<T, U>(value: IWhenable<T>, onFulfilled: (val: T) => IWhenable<U>, onRejected?: (reason: any) => IWhenable<U>, onProgress?: (progress: any) => any): Promise<U>;
/**
* Currently "impossible" (and I use the term loosely) to implement due to TypeScript limitations as it is now.
* See: https://github.com/Microsoft/TypeScript/issues/1784 for discussion on it.
*/
// export function try(method: Function, ...args: any[]): Promise<any>;
export function fbind<T>(method: (...args: any[]) => IWhenable<T>, ...args: any[]): (...args: any[]) => Promise<T>;
export function fcall<T>(method: (...args: any[]) => T, ...args: any[]): Promise<T>;
export function send<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
export function invoke<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
export function mcall<T>(obj: any, functionName: string, ...args: any[]): Promise<T>;
export function denodeify<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
export function nbind<T>(nodeFunction: Function, thisArg: any, ...args: any[]): (...args: any[]) => Promise<T>;
export function nfbind<T>(nodeFunction: Function, ...args: any[]): (...args: any[]) => Promise<T>;
export function nfcall<T>(nodeFunction: Function, ...args: any[]): Promise<T>;
export function nfapply<T>(nodeFunction: Function, args: any[]): Promise<T>;
export function ninvoke<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
export function npost<T>(nodeModule: any, functionName: string, args: any[]): Promise<T>;
export function nsend<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
export function nmcall<T>(nodeModule: any, functionName: string, ...args: any[]): Promise<T>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C, D, E, F>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>, IWhenable<E>, IWhenable<F>]>): Promise<[A, B, C, D, E, F]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C, D, E>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>, IWhenable<E>]>): Promise<[A, B, C, D, E]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C, D>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>, IWhenable<D>]>): Promise<[A, B, C, D]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B, C>(promises: IWhenable<[IWhenable<A>, IWhenable<B>, IWhenable<C>]>): Promise<[A, B, C]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<A, B>(promises: IWhenable<[IWhenable<A>, IWhenable<B>]>): Promise<[A, B]>;
/**
* Returns a promise that is fulfilled with an array containing the fulfillment value of each promise, or is rejected with the same rejection reason as the first promise to be rejected.
*/
export function all<T>(promises: IWhenable<IWhenable<T>[]>): Promise<T[]>;
/**
* Returns a promise for the first of an array of promises to become settled.
*/
export function race<T>(promises: IWhenable<T>[]): Promise<T>;
/**
* Returns a promise that is fulfilled with an array of promise state snapshots, but only after all the original promises have settled, i.e. become either fulfilled or rejected.
*/
export function allSettled<T>(promises: IWhenable<IWhenable<T>[]>): Promise<PromiseState<T>[]>;
export function allResolved<T>(promises: IWhenable<IWhenable<T>[]>): Promise<Promise<T>[]>;
/**
* Like then, but "spreads" the array into a variadic fulfillment handler. If any of the promises in the array are rejected, instead calls onRejected with the first rejected promise's rejection reason.
* This is especially useful in conjunction with all.
*/
export function spread<T, U>(promises: IWhenable<T>[], onFulfilled: (...args: T[]) => IWhenable<U>, onRejected?: (reason: any) => IWhenable<U>): Promise<U>;
/**
* Returns a promise that will have the same result as promise, except that if promise is not fulfilled or rejected before ms milliseconds, the returned promise will be rejected with an Error with the given message. If message is not supplied, the message will be "Timed out after " + ms + " ms".
*/
export function timeout<T>(promise: Promise<T>, ms: number, message?: string): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
export function delay<T>(promise: Promise<T>, ms: number): Promise<T>;
/**
* Returns a promise that will have the same result as promise, but will only be fulfilled or rejected after at least ms milliseconds have passed.
*/
export function delay<T>(value: T, ms: number): Promise<T>;
/**
* Returns a promise that will be fulfilled with undefined after at least ms milliseconds have passed.
*/
export function delay(ms: number): Promise<void>;
/**
* Returns whether a given promise is in the fulfilled state. When the static version is used on non-promises, the result is always true.
*/
export function isFulfilled(promise: Promise<any>): boolean;
/**
* Returns whether a given promise is in the rejected state. When the static version is used on non-promises, the result is always false.
*/
export function isRejected(promise: Promise<any>): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
export function isPending(promise: Promise<any>): boolean;
/**
* Returns a "deferred" object with a:
* promise property
* resolve(value) method
* reject(reason) method
* notify(value) method
* makeNodeResolver() method
*/
export function defer<T>(): Deferred<T>;
/**
* Returns a promise that is rejected with reason.
*/
export function reject<T>(reason?: any): Promise<T>;
export function Promise<T>(resolver: (resolve: (val: IWhenable<T>) => void , reject: (reason: any) => void , notify: (progress: any) => void ) => void ): Promise<T>;
/**
* Creates a new version of func that accepts any combination of promise and non-promise values, converting them to their fulfillment values before calling the original func. The returned version also always returns a promise: if func does a return or throw, then Q.promised(func) will return fulfilled or rejected promise, respectively.
*
* This can be useful for creating functions that accept either promises or non-promise values, and for ensuring that the function always returns a promise even in the face of unintentional thrown exceptions.
*/
export function promised<T>(callback: (...args: any[]) => T): (...args: any[]) => Promise<T>;
/**
* Returns whether the given value is a Q promise.
*/
export function isPromise(object: any): boolean;
/**
* Returns whether the given value is a promise (i.e. it's an object with a then function).
*/
export function isPromiseAlike(object: any): boolean;
/**
* Returns whether a given promise is in the pending state. When the static version is used on non-promises, the result is always false.
*/
export function isPending(object: any): boolean;
/**
* If an object is not a promise, it is as "near" as possible.
* If a promise is rejected, it is as "near" as possible too.
* If it's a fulfilled promise, the fulfillment value is nearer.
* If it's a deferred promise and the deferred has been resolved, the
* resolution is "nearer".
*/
export function nearer<T>(promise: Promise<T>): T;
/**
* This is an experimental tool for converting a generator function into a deferred function. This has the potential of reducing nested callbacks in engines that support yield.
*/
export function async<T>(generatorFunction: any): (...args: any[]) => Promise<T>;
export function nextTick(callback: Function): void;
/**
* A settable property that will intercept any uncaught errors that would otherwise be thrown in the next tick of the event loop, usually as a result of done. Can be useful for getting the full stack trace of an error in browsers, which is not usually possible with window.onerror.
*/
export var onerror: (reason: any) => void;
/**
* A settable property that lets you turn on long stack trace support. If turned on, "stack jumps" will be tracked across asynchronous promise operations, so that if an uncaught error is thrown by done or a rejection reason's stack property is inspected in a rejection callback, a long stack trace is produced.
*/
export var longStackSupport: boolean;
/**
* Calling resolve with a pending promise causes promise to wait on the passed promise, becoming fulfilled with its fulfillment value or rejected with its rejection reason (or staying pending forever, if the passed promise does).
* Calling resolve with a rejected promise causes promise to be rejected with the passed promise's rejection reason.
* Calling resolve with a fulfilled promise causes promise to be fulfilled with the passed promise's fulfillment value.
* Calling resolve with a non-promise value causes promise to be fulfilled with that value.
*/
export function resolve<T>(object: IWhenable<T>): Promise<T>;
/**
* Resets the global "Q" variable to the value it has before Q was loaded.
* This will either be undefined if there was no version or the version of Q which was already loaded before.
* @returns { The last version of Q. }
*/
export function noConflict(): typeof Q;
}
declare module "q" {
export = Q;
}
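
The declarations above describe the classic Q promise API. As a rough usage sketch against these typings (the file paths are hypothetical, and fs and q are assumed to be available), a Node-style callback can be adapted with Q.nfcall and the chain terminated with done:

// Usage sketch only; paths are illustrative.
import * as fs from 'fs';
import Q = require('q');

function readJson(path: string): Q.Promise<any> {
	// Q.nfcall adapts a Node-style callback API (err, value) into a Q promise.
	return Q.nfcall<string>(fs.readFile, path, 'utf8')
		.then(contents => JSON.parse(contents));
}

Q.all([readJson('./a.json'), readJson('./b.json')])
	.spread((a: any, b: any) => {
		console.log('loaded both files', a, b);
	})
	.catch(err => {
		console.error('failed to load a file', err);
	})
	.done(); // terminates the chain so unhandled rejections are rethrown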


@@ -1,16 +0,0 @@
{
"registrations": [
{
"component": {
"type": "git",
"git": {
"name": "definitelytyped",
"repositoryUrl": "https://github.com/DefinitelyTyped/DefinitelyTyped",
"commitHash": "69e3ac6bec3008271f76bbfa7cf69aa9198c4ff0"
}
},
"license": "MIT"
}
],
"version": 1
}

build/lib/typings/chalk.d.ts (vendored, new file, 121 lines)

@@ -0,0 +1,121 @@
// Type definitions for chalk v0.4.0
// Project: https://github.com/sindresorhus/chalk
// Definitions by: Diullei Gomes <https://github.com/Diullei>, Bart van der Schoor <https://github.com/Bartvds>, Nico Jansen <https://github.com/nicojs>
// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped
declare namespace Chalk {
export var enabled: boolean;
export var supportsColor: boolean;
export var styles: ChalkStyleMap;
export function stripColor(value: string): any;
export function hasColor(str: string): boolean;
export interface ChalkChain extends ChalkStyle {
(...text: string[]): string;
}
export interface ChalkStyleElement {
open: string;
close: string;
}
// General
export var reset: ChalkChain;
export var bold: ChalkChain;
export var italic: ChalkChain;
export var underline: ChalkChain;
export var inverse: ChalkChain;
export var strikethrough: ChalkChain;
// Text colors
export var black: ChalkChain;
export var red: ChalkChain;
export var green: ChalkChain;
export var yellow: ChalkChain;
export var blue: ChalkChain;
export var magenta: ChalkChain;
export var cyan: ChalkChain;
export var white: ChalkChain;
export var gray: ChalkChain;
export var grey: ChalkChain;
// Background colors
export var bgBlack: ChalkChain;
export var bgRed: ChalkChain;
export var bgGreen: ChalkChain;
export var bgYellow: ChalkChain;
export var bgBlue: ChalkChain;
export var bgMagenta: ChalkChain;
export var bgCyan: ChalkChain;
export var bgWhite: ChalkChain;
export interface ChalkStyle {
// General
reset: ChalkChain;
bold: ChalkChain;
italic: ChalkChain;
underline: ChalkChain;
inverse: ChalkChain;
strikethrough: ChalkChain;
// Text colors
black: ChalkChain;
red: ChalkChain;
green: ChalkChain;
yellow: ChalkChain;
blue: ChalkChain;
magenta: ChalkChain;
cyan: ChalkChain;
white: ChalkChain;
gray: ChalkChain;
grey: ChalkChain;
// Background colors
bgBlack: ChalkChain;
bgRed: ChalkChain;
bgGreen: ChalkChain;
bgYellow: ChalkChain;
bgBlue: ChalkChain;
bgMagenta: ChalkChain;
bgCyan: ChalkChain;
bgWhite: ChalkChain;
}
export interface ChalkStyleMap {
// General
reset: ChalkStyleElement;
bold: ChalkStyleElement;
italic: ChalkStyleElement;
underline: ChalkStyleElement;
inverse: ChalkStyleElement;
strikethrough: ChalkStyleElement;
// Text colors
black: ChalkStyleElement;
red: ChalkStyleElement;
green: ChalkStyleElement;
yellow: ChalkStyleElement;
blue: ChalkStyleElement;
magenta: ChalkStyleElement;
cyan: ChalkStyleElement;
white: ChalkStyleElement;
gray: ChalkStyleElement;
// Background colors
bgBlack: ChalkStyleElement;
bgRed: ChalkStyleElement;
bgGreen: ChalkStyleElement;
bgYellow: ChalkStyleElement;
bgBlue: ChalkStyleElement;
bgMagenta: ChalkStyleElement;
bgCyan: ChalkStyleElement;
bgWhite: ChalkStyleElement;
}
}
declare module "chalk" {
export = Chalk;
}
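
These are the old chalk v0.4-style typings, in which every style is both callable and chainable. A brief usage sketch against this API (the message strings are illustrative only):

// Usage sketch only; messages are illustrative.
import chalk = require('chalk');

console.log(chalk.green('Build succeeded'));
console.log(chalk.bold.underline('Summary'));
console.log(chalk.bgRed.white('FATAL:'), 'something went wrong');

// Fall back to plain text when the terminal does not support color,
// and use stripColor to remove ANSI escape codes from styled text.
const warning = chalk.supportsColor ? chalk.yellow('warning') : 'warning';
console.log(chalk.stripColor(warning));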

Some files were not shown because too many files have changed in this diff.