mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-23 11:01:38 -05:00
Compare commits
20 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1ebfd02356 | ||
|
|
c942bc1dbb | ||
|
|
8439cde610 | ||
|
|
2080c525a7 | ||
|
|
db2d380b6c | ||
|
|
a06f80bdb1 | ||
|
|
1f76c85b1b | ||
|
|
cffc18d5ea | ||
|
|
a747b6a500 | ||
|
|
711b7bf622 | ||
|
|
9b0757de9c | ||
|
|
b9a0744a83 | ||
|
|
c69915ca58 | ||
|
|
a16835918a | ||
|
|
f9e27d7112 | ||
|
|
c072ba9c5c | ||
|
|
807fb2ed8a | ||
|
|
daf347b728 | ||
|
|
e76222db7a | ||
|
|
f676090901 |
@@ -1,4 +1,4 @@
|
|||||||
# EditorConfig is awesome: https://EditorConfig.org
|
# EditorConfig is awesome: http://EditorConfig.org
|
||||||
|
|
||||||
# top-most EditorConfig file
|
# top-most EditorConfig file
|
||||||
root = true
|
root = true
|
||||||
@@ -6,6 +6,7 @@ root = true
|
|||||||
# Tab indentation
|
# Tab indentation
|
||||||
[*]
|
[*]
|
||||||
indent_style = tab
|
indent_style = tab
|
||||||
|
indent_size = 4
|
||||||
trim_trailing_whitespace = true
|
trim_trailing_whitespace = true
|
||||||
|
|
||||||
# The indent size used in the `package.json` file cannot be changed
|
# The indent size used in the `package.json` file cannot be changed
|
||||||
|
|||||||
19
.eslintrc
Normal file
19
.eslintrc
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
{
|
||||||
|
"env": {
|
||||||
|
"node": true,
|
||||||
|
"es6": true
|
||||||
|
},
|
||||||
|
"rules": {
|
||||||
|
"no-console": 0,
|
||||||
|
"no-cond-assign": 0,
|
||||||
|
"no-unused-vars": 1,
|
||||||
|
"no-extra-semi": "warn",
|
||||||
|
"semi": "warn"
|
||||||
|
},
|
||||||
|
"extends": "eslint:recommended",
|
||||||
|
"parserOptions": {
|
||||||
|
"ecmaFeatures": {
|
||||||
|
"experimentalObjectRestSpread": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
{
|
|
||||||
"root": true,
|
|
||||||
"env": {
|
|
||||||
"node": true,
|
|
||||||
"es6": true
|
|
||||||
},
|
|
||||||
"rules": {
|
|
||||||
"no-console": 0,
|
|
||||||
"no-cond-assign": 0,
|
|
||||||
"no-unused-vars": 1,
|
|
||||||
"no-extra-semi": "warn",
|
|
||||||
"semi": "warn"
|
|
||||||
},
|
|
||||||
"extends": "eslint:recommended",
|
|
||||||
"parserOptions": {
|
|
||||||
"ecmaFeatures": {
|
|
||||||
"experimentalObjectRestSpread": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
3
.gitattributes
vendored
3
.gitattributes
vendored
@@ -6,5 +6,4 @@ ThirdPartyNotices.txt eol=crlf
|
|||||||
*.bat eol=crlf
|
*.bat eol=crlf
|
||||||
*.cmd eol=crlf
|
*.cmd eol=crlf
|
||||||
*.ps1 eol=lf
|
*.ps1 eol=lf
|
||||||
*.sh eol=lf
|
*.sh eol=lf
|
||||||
*.rtf -text
|
|
||||||
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
4
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,10 +1,6 @@
|
|||||||
---
|
---
|
||||||
name: Bug report
|
name: Bug report
|
||||||
about: Create a report to help us improve
|
about: Create a report to help us improve
|
||||||
title: ''
|
|
||||||
labels: Bug
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<!-- Please search existing issues to avoid creating duplicates. -->
|
<!-- Please search existing issues to avoid creating duplicates. -->
|
||||||
|
|||||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
|||||||
---
|
|
||||||
name: Feature request
|
|
||||||
about: Suggest an idea for this project
|
|
||||||
title: ''
|
|
||||||
labels: Enhancement
|
|
||||||
assignees: ''
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Is your feature request related to a problem? Please describe.**
|
|
||||||
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
|
||||||
|
|
||||||
**Describe the solution or feature you'd like**
|
|
||||||
A clear and concise description of what you want to happen.
|
|
||||||
|
|
||||||
**Describe alternatives you've considered**
|
|
||||||
A clear and concise description of any alternative solutions or features you've considered.
|
|
||||||
|
|
||||||
**Additional context**
|
|
||||||
Add any other context or screenshots about the feature request here.
|
|
||||||
49
.github/classifier.yml
vendored
49
.github/classifier.yml
vendored
@@ -1,49 +0,0 @@
|
|||||||
{
|
|
||||||
perform: false,
|
|
||||||
alwaysRequireAssignee: false,
|
|
||||||
labelsRequiringAssignee: [],
|
|
||||||
autoAssignees: {
|
|
||||||
accessibility: [],
|
|
||||||
acquisition: [],
|
|
||||||
agent: [],
|
|
||||||
azure: [],
|
|
||||||
backup: [],
|
|
||||||
bcdr: [],
|
|
||||||
'chart viewer': [],
|
|
||||||
connection: [],
|
|
||||||
dacfx: [],
|
|
||||||
dashboard: [],
|
|
||||||
'data explorer': [],
|
|
||||||
documentation: [],
|
|
||||||
'edit data': [],
|
|
||||||
export: [],
|
|
||||||
extensibility: [],
|
|
||||||
extensionManager: [],
|
|
||||||
globalization: [],
|
|
||||||
grid: [],
|
|
||||||
import: [],
|
|
||||||
insights: [],
|
|
||||||
intellisense: [],
|
|
||||||
localization: [],
|
|
||||||
'managed instance': [],
|
|
||||||
notebooks: [],
|
|
||||||
'object explorer': [],
|
|
||||||
performance: [],
|
|
||||||
profiler: [],
|
|
||||||
'query editor': [],
|
|
||||||
'query execution': [],
|
|
||||||
reliability: [],
|
|
||||||
restore: [],
|
|
||||||
scripting: [],
|
|
||||||
'server group': [],
|
|
||||||
settings: [],
|
|
||||||
setup: [],
|
|
||||||
shell: [],
|
|
||||||
showplan: [],
|
|
||||||
snippet: [],
|
|
||||||
sql2019Preview: [],
|
|
||||||
sqldw: [],
|
|
||||||
supportability: [],
|
|
||||||
ux: []
|
|
||||||
}
|
|
||||||
}
|
|
||||||
12
.github/commands.yml
vendored
12
.github/commands.yml
vendored
@@ -1,12 +0,0 @@
|
|||||||
{
|
|
||||||
perform: false,
|
|
||||||
commands: [
|
|
||||||
{
|
|
||||||
type: 'label',
|
|
||||||
name: 'duplicate',
|
|
||||||
allowTriggerByBot: true,
|
|
||||||
action: 'close',
|
|
||||||
comment: "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
}
|
|
||||||
5
.github/copycat.yml
vendored
5
.github/copycat.yml
vendored
@@ -1,5 +0,0 @@
|
|||||||
{
|
|
||||||
perform: true,
|
|
||||||
target_owner: 'anthonydresser',
|
|
||||||
target_repo: 'testissues'
|
|
||||||
}
|
|
||||||
6
.github/locker.yml
vendored
6
.github/locker.yml
vendored
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
daysAfterClose: 45,
|
|
||||||
daysSinceLastUpdate: 3,
|
|
||||||
ignoredLabels: [],
|
|
||||||
perform: true
|
|
||||||
}
|
|
||||||
6
.github/needs_more_info.yml
vendored
6
.github/needs_more_info.yml
vendored
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
daysUntilClose: 7,
|
|
||||||
needsMoreInfoLabel: 'needs more info',
|
|
||||||
perform: true,
|
|
||||||
closeComment: "This issue has been closed automatically because it needs more information and has not had recent activity in the last 7 days. If you have more info to help resolve the issue, leave a comment"
|
|
||||||
}
|
|
||||||
6
.github/new_release.yml
vendored
6
.github/new_release.yml
vendored
@@ -1,6 +0,0 @@
|
|||||||
{
|
|
||||||
newReleaseLabel: 'new-release',
|
|
||||||
newReleaseColor: '006b75',
|
|
||||||
daysAfterRelease: 5,
|
|
||||||
perform: true
|
|
||||||
}
|
|
||||||
5
.github/similarity.yml
vendored
5
.github/similarity.yml
vendored
@@ -1,5 +0,0 @@
|
|||||||
{
|
|
||||||
perform: true,
|
|
||||||
whenCreatedByTeam: true,
|
|
||||||
comment: "Thanks for submitting this issue. Please also check if it is already covered by an existing one, like:\n${potentialDuplicates}"
|
|
||||||
}
|
|
||||||
9
.gitignore
vendored
9
.gitignore
vendored
@@ -1,10 +1,8 @@
|
|||||||
.DS_Store
|
.DS_Store
|
||||||
.cache
|
|
||||||
npm-debug.log
|
npm-debug.log
|
||||||
Thumbs.db
|
Thumbs.db
|
||||||
node_modules/
|
node_modules/
|
||||||
.build/
|
.build/
|
||||||
extensions/**/dist/
|
|
||||||
out/
|
out/
|
||||||
out-build/
|
out-build/
|
||||||
out-editor/
|
out-editor/
|
||||||
@@ -15,11 +13,8 @@ out-editor-min/
|
|||||||
out-monaco-editor-core/
|
out-monaco-editor-core/
|
||||||
out-vscode/
|
out-vscode/
|
||||||
out-vscode-min/
|
out-vscode-min/
|
||||||
out-vscode-reh/
|
build/node_modules
|
||||||
out-vscode-reh-min/
|
|
||||||
out-vscode-reh-pkg/
|
|
||||||
**/node_modules
|
|
||||||
coverage/
|
coverage/
|
||||||
test_data/
|
test_data/
|
||||||
test-results/
|
test-results/
|
||||||
yarn-error.log
|
yarn-error.log
|
||||||
58
.travis.yml
Normal file
58
.travis.yml
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
sudo: false
|
||||||
|
language: cpp
|
||||||
|
|
||||||
|
os:
|
||||||
|
- linux
|
||||||
|
- osx
|
||||||
|
|
||||||
|
cache:
|
||||||
|
directories:
|
||||||
|
- $HOME/.cache/yarn
|
||||||
|
|
||||||
|
notifications:
|
||||||
|
email: false
|
||||||
|
webhooks:
|
||||||
|
- http://vscode-probot.westus.cloudapp.azure.com:3450/travis/notifications
|
||||||
|
- http://vscode-test-probot.westus.cloudapp.azure.com:3450/travis/notifications
|
||||||
|
|
||||||
|
addons:
|
||||||
|
apt:
|
||||||
|
sources:
|
||||||
|
- ubuntu-toolchain-r-test
|
||||||
|
packages:
|
||||||
|
- gcc-4.9
|
||||||
|
- g++-4.9
|
||||||
|
- gcc-4.9-multilib
|
||||||
|
- g++-4.9-multilib
|
||||||
|
- zip
|
||||||
|
- libgtk2.0-0
|
||||||
|
- libx11-dev
|
||||||
|
- libxkbfile-dev
|
||||||
|
- libsecret-1-dev
|
||||||
|
|
||||||
|
before_install:
|
||||||
|
- git submodule update --init --recursive
|
||||||
|
- nvm install 8.9.1
|
||||||
|
- nvm use 8.9.1
|
||||||
|
- npm i -g yarn
|
||||||
|
# - npm config set python `which python`
|
||||||
|
- if [ $TRAVIS_OS_NAME == "linux" ]; then
|
||||||
|
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
|
||||||
|
sh -e /etc/init.d/xvfb start;
|
||||||
|
sleep 3;
|
||||||
|
fi
|
||||||
|
# Make npm logs less verbose
|
||||||
|
# - npm config set depth 0
|
||||||
|
# - npm config set loglevel warn
|
||||||
|
|
||||||
|
install:
|
||||||
|
- yarn
|
||||||
|
|
||||||
|
script:
|
||||||
|
- node_modules/.bin/gulp electron --silent
|
||||||
|
- node_modules/.bin/gulp compile --silent --max_old_space_size=4096
|
||||||
|
- node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
|
||||||
|
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi
|
||||||
|
|
||||||
|
after_success:
|
||||||
|
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi
|
||||||
23
.vscode/cglicenses.schema.json
vendored
23
.vscode/cglicenses.schema.json
vendored
@@ -1,23 +0,0 @@
|
|||||||
{
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"required": [
|
|
||||||
"name",
|
|
||||||
"licenseDetail"
|
|
||||||
],
|
|
||||||
"properties": {
|
|
||||||
"name": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The name of the dependency"
|
|
||||||
},
|
|
||||||
"licenseDetail": {
|
|
||||||
"type": "array",
|
|
||||||
"description": "The complete license text of the dependency",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
142
.vscode/cgmanifest.schema.json
vendored
142
.vscode/cgmanifest.schema.json
vendored
@@ -1,142 +0,0 @@
|
|||||||
{
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"registrations": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "object",
|
|
||||||
"properties": {
|
|
||||||
"component": {
|
|
||||||
"oneOf": [
|
|
||||||
{
|
|
||||||
"type": "object",
|
|
||||||
"required": [
|
|
||||||
"type",
|
|
||||||
"git"
|
|
||||||
],
|
|
||||||
"properties": {
|
|
||||||
"type": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": [
|
|
||||||
"git"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"git": {
|
|
||||||
"type": "object",
|
|
||||||
"required": [
|
|
||||||
"name",
|
|
||||||
"repositoryUrl",
|
|
||||||
"commitHash"
|
|
||||||
],
|
|
||||||
"properties": {
|
|
||||||
"name": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"repositoryUrl": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"commitHash": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "object",
|
|
||||||
"required": [
|
|
||||||
"type",
|
|
||||||
"npm"
|
|
||||||
],
|
|
||||||
"properties": {
|
|
||||||
"type": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": [
|
|
||||||
"npm"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"npm": {
|
|
||||||
"type": "object",
|
|
||||||
"required": [
|
|
||||||
"name",
|
|
||||||
"version"
|
|
||||||
],
|
|
||||||
"properties": {
|
|
||||||
"name": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"version": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "object",
|
|
||||||
"required": [
|
|
||||||
"type",
|
|
||||||
"other"
|
|
||||||
],
|
|
||||||
"properties": {
|
|
||||||
"type": {
|
|
||||||
"type": "string",
|
|
||||||
"enum": [
|
|
||||||
"other"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"other": {
|
|
||||||
"type": "object",
|
|
||||||
"required": [
|
|
||||||
"name",
|
|
||||||
"downloadUrl",
|
|
||||||
"version"
|
|
||||||
],
|
|
||||||
"properties": {
|
|
||||||
"name": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"downloadUrl": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"version": {
|
|
||||||
"type": "string"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"repositoryUrl": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The git url of the component"
|
|
||||||
},
|
|
||||||
"version": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The version of the component"
|
|
||||||
},
|
|
||||||
"license": {
|
|
||||||
"type": "string",
|
|
||||||
"description": "The name of the license"
|
|
||||||
},
|
|
||||||
"developmentDependency": {
|
|
||||||
"type": "boolean",
|
|
||||||
"description": "This component is inlined in the vscode repo and **is not shipped**."
|
|
||||||
},
|
|
||||||
"isOnlyProductionDependency": {
|
|
||||||
"type": "boolean",
|
|
||||||
"description": "This component is shipped and **is not inlined in the vscode repo**."
|
|
||||||
},
|
|
||||||
"licenseDetail": {
|
|
||||||
"type": "array",
|
|
||||||
"items": {
|
|
||||||
"type": "string"
|
|
||||||
},
|
|
||||||
"description": "The license text"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
2
.vscode/extensions.json
vendored
2
.vscode/extensions.json
vendored
@@ -2,7 +2,7 @@
|
|||||||
// See https://go.microsoft.com/fwlink/?LinkId=827846
|
// See https://go.microsoft.com/fwlink/?LinkId=827846
|
||||||
// for the documentation about the extensions.json format
|
// for the documentation about the extensions.json format
|
||||||
"recommendations": [
|
"recommendations": [
|
||||||
"ms-vscode.vscode-typescript-tslint-plugin",
|
"eg2.tslint",
|
||||||
"dbaeumer.vscode-eslint",
|
"dbaeumer.vscode-eslint",
|
||||||
"msjsdiag.debugger-for-chrome"
|
"msjsdiag.debugger-for-chrome"
|
||||||
]
|
]
|
||||||
|
|||||||
149
.vscode/launch.json
vendored
149
.vscode/launch.json
vendored
@@ -9,12 +9,14 @@
|
|||||||
"stopOnEntry": true,
|
"stopOnEntry": true,
|
||||||
"args": [
|
"args": [
|
||||||
"hygiene"
|
"hygiene"
|
||||||
]
|
],
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "node",
|
"type": "node",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"name": "Attach to Extension Host",
|
"name": "Attach to Extension Host",
|
||||||
|
"protocol": "inspector",
|
||||||
"port": 5870,
|
"port": 5870,
|
||||||
"restart": true,
|
"restart": true,
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
@@ -22,15 +24,19 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "chrome",
|
"type": "node",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"name": "Attach to Shared Process",
|
"name": "Attach to Shared Process",
|
||||||
"port": 9222,
|
"protocol": "inspector",
|
||||||
"urlFilter": "*"
|
"port": 5871,
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "node",
|
"type": "node",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
|
"protocol": "inspector",
|
||||||
"name": "Attach to Search Process",
|
"name": "Attach to Search Process",
|
||||||
"port": 5876,
|
"port": 5876,
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
@@ -41,6 +47,7 @@
|
|||||||
"type": "node",
|
"type": "node",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"name": "Attach to CLI Process",
|
"name": "Attach to CLI Process",
|
||||||
|
"protocol": "inspector",
|
||||||
"port": 5874,
|
"port": 5874,
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
"${workspaceFolder}/out/**/*.js"
|
"${workspaceFolder}/out/**/*.js"
|
||||||
@@ -50,6 +57,7 @@
|
|||||||
"type": "node",
|
"type": "node",
|
||||||
"request": "attach",
|
"request": "attach",
|
||||||
"name": "Attach to Main Process",
|
"name": "Attach to Main Process",
|
||||||
|
"protocol": "inspector",
|
||||||
"port": 5875,
|
"port": 5875,
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
"${workspaceFolder}/out/**/*.js"
|
"${workspaceFolder}/out/**/*.js"
|
||||||
@@ -65,45 +73,6 @@
|
|||||||
"type": "chrome",
|
"type": "chrome",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"name": "Launch azuredatastudio",
|
"name": "Launch azuredatastudio",
|
||||||
"windows": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
|
|
||||||
"timeout": 20000
|
|
||||||
},
|
|
||||||
"osx": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
|
||||||
"timeout": 20000
|
|
||||||
},
|
|
||||||
"linux": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
|
||||||
"timeout": 20000
|
|
||||||
},
|
|
||||||
"env": {
|
|
||||||
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
|
|
||||||
},
|
|
||||||
"breakOnLoad": false,
|
|
||||||
"urlFilter": "*workbench.html*",
|
|
||||||
"runtimeArgs": [
|
|
||||||
"--inspect=5875",
|
|
||||||
"--no-cached-data"
|
|
||||||
],
|
|
||||||
"webRoot": "${workspaceFolder}"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "node",
|
|
||||||
"request": "launch",
|
|
||||||
"name": "Launch ADS (Main Process)",
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
|
||||||
"runtimeArgs": [
|
|
||||||
"--no-cached-data"
|
|
||||||
],
|
|
||||||
"outFiles": [
|
|
||||||
"${workspaceFolder}/out/**/*.js"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "chrome",
|
|
||||||
"request": "launch",
|
|
||||||
"name": "Launch azuredatastudio with new notebook command",
|
|
||||||
"windows": {
|
"windows": {
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
|
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
|
||||||
},
|
},
|
||||||
@@ -115,8 +84,7 @@
|
|||||||
},
|
},
|
||||||
"urlFilter": "*index.html*",
|
"urlFilter": "*index.html*",
|
||||||
"runtimeArgs": [
|
"runtimeArgs": [
|
||||||
"--inspect=5875",
|
"--inspect=5875"
|
||||||
"--command=notebook.command.new"
|
|
||||||
],
|
],
|
||||||
"skipFiles": [
|
"skipFiles": [
|
||||||
"**/winjs*.js"
|
"**/winjs*.js"
|
||||||
@@ -124,6 +92,34 @@
|
|||||||
"webRoot": "${workspaceFolder}",
|
"webRoot": "${workspaceFolder}",
|
||||||
"timeout": 45000
|
"timeout": 45000
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"type": "node",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Unit Tests",
|
||||||
|
"protocol": "inspector",
|
||||||
|
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
|
||||||
|
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
|
||||||
|
"windows": {
|
||||||
|
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
|
||||||
|
},
|
||||||
|
"linux": {
|
||||||
|
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
|
||||||
|
},
|
||||||
|
"stopOnEntry": false,
|
||||||
|
"outputCapture": "std",
|
||||||
|
"args": [
|
||||||
|
"--delay",
|
||||||
|
"--timeout",
|
||||||
|
"2000"
|
||||||
|
],
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
|
"env": {
|
||||||
|
"ELECTRON_RUN_AS_NODE": "true"
|
||||||
|
},
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/out/**/*.js"
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "Launch Built-in Extension",
|
"name": "Launch Built-in Extension",
|
||||||
"type": "extensionHost",
|
"type": "extensionHost",
|
||||||
@@ -132,70 +128,9 @@
|
|||||||
"args": [
|
"args": [
|
||||||
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
|
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
|
||||||
]
|
]
|
||||||
},
|
}
|
||||||
{
|
|
||||||
"type": "node",
|
|
||||||
"request": "launch",
|
|
||||||
"name": "Launch Smoke Test",
|
|
||||||
"program": "${workspaceFolder}/test/smoke/test/index.js",
|
|
||||||
"cwd": "${workspaceFolder}/test/smoke",
|
|
||||||
"env": {
|
|
||||||
"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "node",
|
|
||||||
"request": "launch",
|
|
||||||
"name": "Run Unit Tests",
|
|
||||||
"program": "${workspaceFolder}/test/electron/index.js",
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
|
|
||||||
"windows": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
|
|
||||||
},
|
|
||||||
"linux": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
|
|
||||||
},
|
|
||||||
"outputCapture": "std",
|
|
||||||
"args": [
|
|
||||||
"--remote-debugging-port=9222"
|
|
||||||
],
|
|
||||||
"cwd": "${workspaceFolder}",
|
|
||||||
"outFiles": [
|
|
||||||
"${workspaceFolder}/out/**/*.js"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"type": "chrome",
|
|
||||||
"request": "launch",
|
|
||||||
"name": "Run Extension Unit Tests",
|
|
||||||
"windows": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.bat"
|
|
||||||
},
|
|
||||||
"osx": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
|
|
||||||
},
|
|
||||||
"linux": {
|
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
|
|
||||||
},
|
|
||||||
"webRoot": "${workspaceFolder}",
|
|
||||||
"timeout": 45000
|
|
||||||
},
|
|
||||||
],
|
],
|
||||||
"compounds": [
|
"compounds": [
|
||||||
{
|
|
||||||
"name": "Debug Unit Tests",
|
|
||||||
"configurations": [
|
|
||||||
"Attach to azuredatastudio",
|
|
||||||
"Run Unit Tests"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Debug Extension Unit Tests",
|
|
||||||
"configurations": [
|
|
||||||
"Attach to Extension Host",
|
|
||||||
"Run Extension Unit Tests"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"name": "Debug azuredatastudio Main and Renderer",
|
"name": "Debug azuredatastudio Main and Renderer",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
|
|||||||
26
.vscode/settings.json
vendored
26
.vscode/settings.json
vendored
@@ -11,7 +11,7 @@
|
|||||||
}
|
}
|
||||||
},
|
},
|
||||||
"files.associations": {
|
"files.associations": {
|
||||||
"cglicenses.json": "jsonc"
|
"OSSREADME.json": "jsonc"
|
||||||
},
|
},
|
||||||
"search.exclude": {
|
"search.exclude": {
|
||||||
"**/node_modules": true,
|
"**/node_modules": true,
|
||||||
@@ -22,9 +22,9 @@
|
|||||||
"out-vscode/**": true,
|
"out-vscode/**": true,
|
||||||
"i18n/**": true,
|
"i18n/**": true,
|
||||||
"extensions/**/out/**": true,
|
"extensions/**/out/**": true,
|
||||||
"test/smoke/out/**": true,
|
"test/smoke/out/**": true
|
||||||
"src/vs/base/test/node/uri.test.data.txt": true
|
|
||||||
},
|
},
|
||||||
|
"tslint.enable": true,
|
||||||
"lcov.path": [
|
"lcov.path": [
|
||||||
"./.build/coverage/lcov.info",
|
"./.build/coverage/lcov.info",
|
||||||
"./.build/coverage-single/lcov.info"
|
"./.build/coverage-single/lcov.info"
|
||||||
@@ -43,20 +43,6 @@
|
|||||||
"git.ignoreLimitWarning": true,
|
"git.ignoreLimitWarning": true,
|
||||||
"emmet.excludeLanguages": [],
|
"emmet.excludeLanguages": [],
|
||||||
"typescript.preferences.importModuleSpecifier": "non-relative",
|
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||||
"typescript.preferences.quoteStyle": "single",
|
"typescript.preferences.quoteStyle": "single"
|
||||||
"json.schemas": [
|
|
||||||
{
|
}
|
||||||
"fileMatch": [
|
|
||||||
"cgmanifest.json"
|
|
||||||
],
|
|
||||||
"url": "./.vscode/cgmanifest.schema.json"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"fileMatch": [
|
|
||||||
"cglicenses.json"
|
|
||||||
],
|
|
||||||
"url": "./.vscode/cglicenses.schema.json"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"git.ignoreLimitWarning": true
|
|
||||||
}
|
|
||||||
|
|||||||
40
.vscode/shared.code-snippets
vendored
40
.vscode/shared.code-snippets
vendored
@@ -1,40 +0,0 @@
|
|||||||
{
|
|
||||||
// Each snippet is defined under a snippet name and has a scope, prefix, body and
|
|
||||||
// description. The scope defines in watch languages the snippet is applicable. The prefix is what is
|
|
||||||
// used to trigger the snippet and the body will be expanded and inserted.Possible variables are:
|
|
||||||
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
|
|
||||||
// Placeholders with the same ids are connected.
|
|
||||||
// Example:
|
|
||||||
"MSFT Copyright Header": {
|
|
||||||
"scope": "javascript,typescript,css",
|
|
||||||
"prefix": [
|
|
||||||
"header",
|
|
||||||
"stub",
|
|
||||||
"copyright"
|
|
||||||
],
|
|
||||||
"body": [
|
|
||||||
"/*---------------------------------------------------------------------------------------------",
|
|
||||||
" * Copyright (c) Microsoft Corporation. All rights reserved.",
|
|
||||||
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
|
|
||||||
" *--------------------------------------------------------------------------------------------*/",
|
|
||||||
"",
|
|
||||||
"$0"
|
|
||||||
],
|
|
||||||
"description": "Insert Copyright Statement"
|
|
||||||
},
|
|
||||||
"TS -> Inject Service": {
|
|
||||||
"scope": "typescript",
|
|
||||||
"description": "Constructor Injection Pattern",
|
|
||||||
"prefix": "@inject",
|
|
||||||
"body": "@$1 private readonly _$2: ${1},$0"
|
|
||||||
},
|
|
||||||
"TS -> Event & Emitter": {
|
|
||||||
"scope": "typescript",
|
|
||||||
"prefix": "emitter",
|
|
||||||
"description": "Add emitter and event properties",
|
|
||||||
"body": [
|
|
||||||
"private readonly _onDid$1 = new Emitter<$2>();",
|
|
||||||
"readonly onDid$1: Event<$2> = this._onDid$1.event;"
|
|
||||||
],
|
|
||||||
}
|
|
||||||
}
|
|
||||||
16
.vscode/tasks.json
vendored
16
.vscode/tasks.json
vendored
@@ -28,20 +28,6 @@
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"type": "npm",
|
|
||||||
"script": "strict-initialization-watch",
|
|
||||||
"label": "TS - Strict Initialization",
|
|
||||||
"isBackground": true,
|
|
||||||
"presentation": {
|
|
||||||
"reveal": "never"
|
|
||||||
},
|
|
||||||
"problemMatcher": {
|
|
||||||
"base": "$tsc-watch",
|
|
||||||
"owner": "typescript-strict-initialization",
|
|
||||||
"applyTo": "allDocuments"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"type": "gulp",
|
"type": "gulp",
|
||||||
"task": "tslint",
|
"task": "tslint",
|
||||||
@@ -83,4 +69,4 @@
|
|||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
2
.yarnrc
2
.yarnrc
@@ -1,3 +1,3 @@
|
|||||||
disturl "https://atom.io/download/electron"
|
disturl "https://atom.io/download/electron"
|
||||||
target "3.1.8"
|
target "2.0.9"
|
||||||
runtime "electron"
|
runtime "electron"
|
||||||
|
|||||||
111
CHANGELOG.md
111
CHANGELOG.md
@@ -1,116 +1,5 @@
|
|||||||
# Change Log
|
# Change Log
|
||||||
|
|
||||||
## Version 1.7.0
|
|
||||||
* Release date: May 8, 2019
|
|
||||||
* Release status: General Availability
|
|
||||||
|
|
||||||
## What's new in this version
|
|
||||||
* Announcing Schema Compare *Preview* extension
|
|
||||||
* Tasks Panel UX improvement
|
|
||||||
* Announcing new Welcome page
|
|
||||||
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/31?closed=1).
|
|
||||||
|
|
||||||
## Contributions and "thank you"
|
|
||||||
We would like to thank all our users who raised issues.
|
|
||||||
|
|
||||||
## Version 1.6.0
|
|
||||||
* Release date: April 18, 2019
|
|
||||||
* Release status: General Availability
|
|
||||||
|
|
||||||
## What's new in this version
|
|
||||||
* Align with latest VS Code editor platform (currently 1.33.1)
|
|
||||||
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/26?closed=1).
|
|
||||||
|
|
||||||
## Contributions and "thank you"
|
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
|
||||||
|
|
||||||
* yamatoya for `fix the format (#4899)`
|
|
||||||
|
|
||||||
## Version 1.5.1
|
|
||||||
* Release date: March 18, 2019
|
|
||||||
* Release status: General Availability
|
|
||||||
|
|
||||||
## What's new in this version
|
|
||||||
* Announcing T-SQL Notebooks
|
|
||||||
* Announcing PostgreSQL extension
|
|
||||||
* Announcing SQL Server Dacpac extension
|
|
||||||
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/25?closed=1).
|
|
||||||
|
|
||||||
## Contributions and "thank you"
|
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
|
||||||
|
|
||||||
* GeoffYoung for `Fix sqlDropColumn description #4422`
|
|
||||||
|
|
||||||
## Version 1.4.5
|
|
||||||
* Release date: February 13, 2019
|
|
||||||
* Release status: General Availability
|
|
||||||
|
|
||||||
## What's new in this version
|
|
||||||
* Added **Admin pack for SQL Server** extension pack to make it easier to install SQL Server admin-related extensions. This includes:
|
|
||||||
* [SQL Server Agent](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-agent-extension?view=sql-server-2017)
|
|
||||||
* [SQL Server Profiler](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-profiler-extension?view=sql-server-2017)
|
|
||||||
* [SQL Server Import](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-import-extension?view=sql-server-2017)
|
|
||||||
* Added filtering extended event support in Profiler extension
|
|
||||||
* Added Save as XML feature that can save T-SQL results as XML
|
|
||||||
* Added Data-Tier Application Wizard improvements
|
|
||||||
* Added Generate script button
|
|
||||||
* Added view to give warnings of possible data loss during deployment
|
|
||||||
* Updates to the [SQL Server 2019 Preview extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
|
|
||||||
* Results streaming enabled by default for long running queries
|
|
||||||
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/23?closed=1).
|
|
||||||
|
|
||||||
## Contributions and "thank you"
|
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
|
||||||
|
|
||||||
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
|
|
||||||
* sadedil for `Missing feature request: Save as XML #3729`
|
|
||||||
* gbritton1 for `Removed reference to object explorer #3463`
|
|
||||||
|
|
||||||
## Version 1.3.8
|
|
||||||
* Release date: January 9, 2019
|
|
||||||
* Release status: General Availability
|
|
||||||
|
|
||||||
## What's new in this version
|
|
||||||
* #13 Feature Request: Azure Active Directory Authentication
|
|
||||||
* #1040 Stream initial query results as they become available
|
|
||||||
* #3298 Сan't add an azure account.
|
|
||||||
* #2387 Support Per-User Installer
|
|
||||||
* SQL Server Import updates for DACPAC\BACPAC
|
|
||||||
* SQL Server Profiler UI and UX improvements
|
|
||||||
* Updates to [SQL Server 2019 extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
|
|
||||||
* **sp_executesql to SQL** and **New Database** extensions
|
|
||||||
|
|
||||||
## Contributions and "thank you"
|
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
|
||||||
|
|
||||||
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
|
|
||||||
* oltruong for `typo fix #3025'`
|
|
||||||
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
|
|
||||||
* Thomas-S-B for `Simplified code #750`
|
|
||||||
|
|
||||||
## Version 1.2.4
|
|
||||||
* Release date: November 6, 2018
|
|
||||||
* Release status: General Availability
|
|
||||||
|
|
||||||
## What's new in this version
|
|
||||||
* Update to the SQL Server 2019 Preview extension
|
|
||||||
* Introducing Paste the Plan extension
|
|
||||||
* Introducing High Color queries extension, including SSMS editor theme
|
|
||||||
* Fixes in SQL Server Agent, Profiler, and Import extensions
|
|
||||||
* Fix .Net Core Socket KeepAlive issue causing dropped inactive connections on macOS
|
|
||||||
* Upgrade SQL Tools Service to .Net Core 2.2 Preview 3 (for eventual AAD support)
|
|
||||||
* Fix customer reported GitHub issues
|
|
||||||
|
|
||||||
## Contributions and "thank you"
|
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
|
||||||
|
|
||||||
* rdaniels6813 for `Add query plan theme support #3031`
|
|
||||||
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
|
|
||||||
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
|
|
||||||
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
|
|
||||||
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
|
|
||||||
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
|
|
||||||
|
|
||||||
## Version 1.1.3
|
## Version 1.1.3
|
||||||
* Release date: October 18, 2018
|
* Release date: October 18, 2018
|
||||||
* Release status: General Availability
|
* Release status: General Availability
|
||||||
|
|||||||
@@ -18,15 +18,11 @@ File a single issue per problem and feature request.
|
|||||||
* Do not enumerate multiple bugs or feature requests in the same issue.
|
* Do not enumerate multiple bugs or feature requests in the same issue.
|
||||||
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.
|
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.
|
||||||
|
|
||||||
The more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix.
|
The more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix.
|
||||||
|
|
||||||
The built-in tool for reporting an issue, which you can access by using `Report Issue` in Azure Data Studio's Help menu, can help streamline this process by automatically providing the version of Azure Data Studio, all your installed extensions, and your system info.
|
|
||||||
|
|
||||||
Please include the following with each issue.
|
Please include the following with each issue.
|
||||||
|
|
||||||
* Version of Azure Data Studio (formerly SQL Operations Studio)
|
* Version of Azure Data Studio (formerly SQL Operations Studio).
|
||||||
|
|
||||||
* Your operating system
|
|
||||||
|
|
||||||
> **Tip:** You can easily create an issue using `Report Issues` from Azure Data Studio Help menu.
|
> **Tip:** You can easily create an issue using `Report Issues` from Azure Data Studio Help menu.
|
||||||
|
|
||||||
|
|||||||
1751
OSSREADME.json
Normal file
1751
OSSREADME.json
Normal file
File diff suppressed because it is too large
Load Diff
37
README.md
37
README.md
@@ -1,7 +1,6 @@
|
|||||||
# Azure Data Studio
|
# Azure Data Studio
|
||||||
|
|
||||||
[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||||
[](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
|
|
||||||
|
|
||||||
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
|
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
|
||||||
|
|
||||||
@@ -9,22 +8,16 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
|
|||||||
|
|
||||||
Platform | Link
|
Platform | Link
|
||||||
-- | --
|
-- | --
|
||||||
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2091882
|
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=2030731
|
||||||
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2091491
|
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2030736
|
||||||
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2091490
|
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2030738
|
||||||
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2091489
|
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2030741
|
||||||
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2091488
|
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2030746
|
||||||
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2091487
|
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2030750
|
||||||
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2092022
|
|
||||||
|
|
||||||
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
|
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
|
||||||
|
|
||||||
Try out the latest insiders build from `master`:
|
Try out the latest insiders build from `master` at https://github.com/Microsoft/azuredatastudio/releases.
|
||||||
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
|
|
||||||
- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
|
|
||||||
- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
|
|
||||||
- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
|
|
||||||
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)
|
|
||||||
|
|
||||||
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
|
See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.
|
||||||
|
|
||||||
@@ -68,21 +61,6 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
|
|||||||
## Contributions and "Thank You"
|
## Contributions and "Thank You"
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||||
|
|
||||||
* yamatoya for `fix the format (#4899)`
|
|
||||||
* GeoffYoung for `Fix sqlDropColumn description #4422`
|
|
||||||
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
|
|
||||||
* sadedil for `Missing feature request: Save as XML #3729`
|
|
||||||
* gbritton1 for `Removed reference to object explorer #3463`
|
|
||||||
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
|
|
||||||
* oltruong for `typo fix #3025`
|
|
||||||
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
|
|
||||||
* Thomas-S-B for `Simplified code #750`
|
|
||||||
* rdaniels6813 for `Add query plan theme support #3031`
|
|
||||||
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
|
|
||||||
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of 🙂 #3009`
|
|
||||||
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
|
|
||||||
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
|
|
||||||
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
|
|
||||||
* philoushka for `center the icon #2760`
|
* philoushka for `center the icon #2760`
|
||||||
* anthonypants for `Typo #2775`
|
* anthonypants for `Typo #2775`
|
||||||
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
|
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
|
||||||
@@ -125,6 +103,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
|||||||
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
|
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
|
||||||
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
|
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi
|
||||||
|
|
||||||
|
|
||||||
And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)
|
And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
|||||||
@@ -17,12 +17,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
chokidar: https://github.com/paulmillr/chokidar
|
chokidar: https://github.com/paulmillr/chokidar
|
||||||
comment-json: https://github.com/kaelzhang/node-comment-json
|
comment-json: https://github.com/kaelzhang/node-comment-json
|
||||||
core-js: https://github.com/zloirock/core-js
|
core-js: https://github.com/zloirock/core-js
|
||||||
decompress: https://github.com/kevva/decompress
|
|
||||||
emmet: https://github.com/emmetio/emmet
|
emmet: https://github.com/emmetio/emmet
|
||||||
error-ex: https://github.com/Qix-/node-error-ex
|
error-ex: https://github.com/Qix-/node-error-ex
|
||||||
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
|
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
|
||||||
fast-plist: https://github.com/Microsoft/node-fast-plist
|
fast-plist: https://github.com/Microsoft/node-fast-plist
|
||||||
figures: https://github.com/sindresorhus/figures
|
|
||||||
find-remove: https://www.npmjs.com/package/find-remove
|
find-remove: https://www.npmjs.com/package/find-remove
|
||||||
fs-extra: https://github.com/jprichardson/node-fs-extra
|
fs-extra: https://github.com/jprichardson/node-fs-extra
|
||||||
gc-signals: https://github.com/Microsoft/node-gc-signals
|
gc-signals: https://github.com/Microsoft/node-gc-signals
|
||||||
@@ -36,35 +34,28 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
jquery-ui: https://github.com/jquery/jquery-ui
|
jquery-ui: https://github.com/jquery/jquery-ui
|
||||||
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
|
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
|
||||||
jschardet: https://github.com/aadsm/jschardet
|
jschardet: https://github.com/aadsm/jschardet
|
||||||
JupyterLab: https://github.com/jupyterlab/jupyterlab
|
|
||||||
make-error: https://github.com/JsCommunity/make-error
|
make-error: https://github.com/JsCommunity/make-error
|
||||||
minimist: https://github.com/substack/minimist
|
minimist: https://github.com/substack/minimist
|
||||||
moment: https://github.com/moment/moment
|
moment: https://github.com/moment/moment
|
||||||
native-keymap: https://github.com/Microsoft/node-native-keymap
|
native-keymap: https://github.com/Microsoft/node-native-keymap
|
||||||
native-watchdog: https://github.com/Microsoft/node-native-watchdog
|
native-watchdog: https://github.com/Microsoft/node-native-watchdog
|
||||||
ng2-charts: https://github.com/valor-software/ng2-charts
|
ng2-charts: https://github.com/valor-software/ng2-charts
|
||||||
node-fetch: https://github.com/bitinn/node-fetch
|
|
||||||
node-pty: https://github.com/Tyriar/node-pty
|
node-pty: https://github.com/Tyriar/node-pty
|
||||||
nsfw: https://github.com/Axosoft/nsfw
|
nsfw: https://github.com/Axosoft/nsfw
|
||||||
pretty-data: https://github.com/vkiryukhin/pretty-data
|
pretty-data: https://github.com/vkiryukhin/pretty-data
|
||||||
primeng: https://github.com/primefaces/primeng
|
primeng: https://github.com/primefaces/primeng
|
||||||
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
|
|
||||||
pty.js: https://github.com/chjj/pty.js
|
pty.js: https://github.com/chjj/pty.js
|
||||||
reflect-metadata: https://github.com/rbuckton/reflect-metadata
|
reflect-metadata: https://github.com/rbuckton/reflect-metadata
|
||||||
request: https://github.com/request/request
|
|
||||||
rxjs: https://github.com/ReactiveX/RxJS
|
rxjs: https://github.com/ReactiveX/RxJS
|
||||||
semver: https://github.com/npm/node-semver
|
semver: https://github.com/npm/node-semver
|
||||||
slickgrid: https://github.com/6pac/SlickGrid
|
slickgrid: https://github.com/6pac/SlickGrid
|
||||||
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
|
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
|
||||||
svg.js: https://github.com/svgdotjs/svg.js
|
svg.js: https://github.com/svgdotjs/svg.js
|
||||||
systemjs: https://github.com/systemjs/systemjs
|
systemjs: https://github.com/systemjs/systemjs
|
||||||
temp-write: https://github.com/sindresorhus/temp-write
|
|
||||||
underscore: https://github.com/jashkenas/underscore
|
underscore: https://github.com/jashkenas/underscore
|
||||||
v8-profiler: https://github.com/node-inspector/v8-profiler
|
v8-profiler: https://github.com/node-inspector/v8-profiler
|
||||||
vscode: https://github.com/microsoft/vscode
|
vscode: https://github.com/microsoft/vscode
|
||||||
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
|
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
|
||||||
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
|
|
||||||
vscode-nls: https://github.com/Microsoft/vscode-nls
|
|
||||||
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
|
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
|
||||||
vscode-textmate: https://github.com/Microsoft/vscode-textmate
|
vscode-textmate: https://github.com/Microsoft/vscode-textmate
|
||||||
winreg: https://github.com/fresc81/node-winreg
|
winreg: https://github.com/fresc81/node-winreg
|
||||||
@@ -72,9 +63,10 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
yauzl: https://github.com/thejoshwolfe/yauzl
|
yauzl: https://github.com/thejoshwolfe/yauzl
|
||||||
zone.js: https://www.npmjs.com/package/zone
|
zone.js: https://www.npmjs.com/package/zone
|
||||||
|
|
||||||
Microsoft PROSE SDK: https://microsoft.github.io/prose
|
|
||||||
|
|
||||||
%% angular NOTICES AND INFORMATION BEGIN HERE
|
%% angular NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
|
The MIT License
|
||||||
|
|
||||||
Copyright (c) 2014-2017 Google, Inc. http://angular.io
|
Copyright (c) 2014-2017 Google, Inc. http://angular.io
|
||||||
|
|
||||||
@@ -300,20 +292,6 @@ THE SOFTWARE.
|
|||||||
=========================================
|
=========================================
|
||||||
END OF core-js NOTICES AND INFORMATION
|
END OF core-js NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% decompress NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) Kevin Mårtensson <kevinmartensson@gmail.com> (github.com/kevva)
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
=========================================
|
|
||||||
END OF decompress NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% emmet NOTICES AND INFORMATION BEGIN HERE
|
%% emmet NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
@@ -343,6 +321,32 @@ END OF emmet NOTICES AND INFORMATION
|
|||||||
=========================================
|
=========================================
|
||||||
The MIT License (MIT)
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2015 JD Ballard
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in
|
||||||
|
all copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||||
|
THE SOFTWARE.
|
||||||
|
=========================================
|
||||||
|
END OF error-ex NOTICES AND INFORMATION
|
||||||
|
|
||||||
|
%% escape-string-regexp NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
@@ -389,20 +393,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
|
|||||||
=========================================
|
=========================================
|
||||||
END OF fast-plist NOTICES AND INFORMATION
|
END OF fast-plist NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% figures NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
=========================================
|
|
||||||
END OF figures NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% fs-extra NOTICES AND INFORMATION BEGIN HERE
|
%% fs-extra NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
(The MIT License)
|
(The MIT License)
|
||||||
@@ -1176,43 +1166,6 @@ That's all there is to it!
|
|||||||
=========================================
|
=========================================
|
||||||
END OF jschardet NOTICES AND INFORMATION
|
END OF jschardet NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
Copyright (c) 2015 Project Jupyter Contributors
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright notice, this
|
|
||||||
list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
3. Neither the name of the copyright holder nor the names of its
|
|
||||||
contributors may be used to endorse or promote products derived from
|
|
||||||
this software without specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
||||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
||||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
||||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
||||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
Semver File License
|
|
||||||
===================
|
|
||||||
|
|
||||||
The semver.py file is from https://github.com/podhmo/python-semver
|
|
||||||
which is licensed under the "MIT" license. See the semver.py file for details.
|
|
||||||
|
|
||||||
END OF JupyterLab NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% make-error NOTICES AND INFORMATION BEGIN HERE
|
%% make-error NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
ISC © Julien Fontanet
|
ISC © Julien Fontanet
|
||||||
@@ -1344,32 +1297,6 @@ SOFTWARE.
|
|||||||
=========================================
|
=========================================
|
||||||
END OF ng2-charts NOTICES AND INFORMATION
|
END OF ng2-charts NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% node-fetch NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2016 David Frank
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
=========================================
|
|
||||||
END OF node-fetch NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% node-pty NOTICES AND INFORMATION BEGIN HERE
|
%% node-pty NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
|
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
|
||||||
@@ -1444,30 +1371,6 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
|
|||||||
=========================================
|
=========================================
|
||||||
END OF primeng NOTICES AND INFORMATION
|
END OF primeng NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% process-nextick-args NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
# Copyright (c) 2015 Calvin Metcalf
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.**
|
|
||||||
=========================================
|
|
||||||
END OF process-nextick-args NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% pty.js NOTICES AND INFORMATION BEGIN HERE
|
%% pty.js NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
|
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
|
||||||
@@ -1552,66 +1455,6 @@ END OF TERMS AND CONDITIONS
|
|||||||
=========================================
|
=========================================
|
||||||
END OF reflect-metadata NOTICES AND INFORMATION
|
END OF reflect-metadata NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% request NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
Apache License
|
|
||||||
|
|
||||||
Version 2.0, January 2004
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/
|
|
||||||
|
|
||||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
|
||||||
|
|
||||||
1. Definitions.
|
|
||||||
|
|
||||||
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
|
||||||
|
|
||||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
|
||||||
|
|
||||||
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
|
||||||
|
|
||||||
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
|
||||||
|
|
||||||
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
|
||||||
|
|
||||||
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
|
||||||
|
|
||||||
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
|
||||||
|
|
||||||
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
|
||||||
|
|
||||||
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
|
||||||
|
|
||||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
|
||||||
|
|
||||||
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
|
||||||
|
|
||||||
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
|
||||||
|
|
||||||
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
|
||||||
|
|
||||||
You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
|
||||||
|
|
||||||
You must cause any modified files to carry prominent notices stating that You changed the files; and
|
|
||||||
|
|
||||||
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
|
||||||
|
|
||||||
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
|
||||||
|
|
||||||
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
|
||||||
|
|
||||||
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
|
||||||
|
|
||||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
|
||||||
|
|
||||||
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
|
||||||
|
|
||||||
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
|
||||||
|
|
||||||
END OF TERMS AND CONDITIONS
|
|
||||||
=========================================
|
|
||||||
END OF request NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% rxjs NOTICES AND INFORMATION BEGIN HERE
|
%% rxjs NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
Apache License
|
Apache License
|
||||||
@@ -1937,20 +1780,6 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
|
|||||||
=========================================
|
=========================================
|
||||||
END OF systemjs NOTICES AND INFORMATION
|
END OF systemjs NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% temp-write NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
MIT License
|
|
||||||
|
|
||||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
=========================================
|
|
||||||
END OF temp-write NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% underscore NOTICES AND INFORMATION BEGIN HERE
|
%% underscore NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
|
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
|
||||||
@@ -2053,50 +1882,6 @@ OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWA
|
|||||||
=========================================
|
=========================================
|
||||||
END OF vscode-debugprotocol NOTICES AND INFORMATION
|
END OF vscode-debugprotocol NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% vscode-languageclient NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
Copyright (c) Microsoft Corporation
|
|
||||||
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
MIT License
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
|
|
||||||
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
|
|
||||||
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
|
|
||||||
is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
|
||||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
|
|
||||||
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
=========================================
|
|
||||||
END OF vscode-languageclient NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% vscode-nls NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) Microsoft Corporation
|
|
||||||
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
|
|
||||||
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
|
|
||||||
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
|
|
||||||
is furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
|
||||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
|
||||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
|
|
||||||
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
||||||
=========================================
|
|
||||||
END OF vscode-nls NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE
|
%% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE
|
||||||
=========================================
|
=========================================
|
||||||
vscode-ripgrep
|
vscode-ripgrep
|
||||||
@@ -2256,187 +2041,3 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
|||||||
THE SOFTWARE.
|
THE SOFTWARE.
|
||||||
=========================================
|
=========================================
|
||||||
END OF zone.js NOTICES AND INFORMATION
|
END OF zone.js NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
NOTICES AND INFORMATION
|
|
||||||
Do Not Translate or Localize
|
|
||||||
|
|
||||||
This software incorporates material from third parties. Microsoft makes certain
|
|
||||||
open source code available at http://3rdpartysource.microsoft.com, or you may
|
|
||||||
send a check or money order for US $5.00, including the product name, the open
|
|
||||||
source component name, and version number, to:
|
|
||||||
|
|
||||||
Source Code Compliance Team
|
|
||||||
Microsoft Corporation
|
|
||||||
One Microsoft Way
|
|
||||||
Redmond, WA 98052
|
|
||||||
USA
|
|
||||||
|
|
||||||
Notwithstanding any other terms, you may reverse engineer this software to the
|
|
||||||
extent required to debug changes to any libraries licensed under the GNU Lesser
|
|
||||||
General Public License.
|
|
||||||
|
|
||||||
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
|
|
||||||
|
|
||||||
===================================CoreFx (BEGIN)
|
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) .NET Foundation and Contributors
|
|
||||||
|
|
||||||
All rights reserved.
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
===================================CoreFx (END)
|
|
||||||
|
|
||||||
===================================CoreFxLab (BEGIN)
|
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) Microsoft Corporation
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
===================================CoreFxLab (END)
|
|
||||||
|
|
||||||
===================================Reactive Extensions (BEGIN)
|
|
||||||
Copyright (c) .NET Foundation and Contributors
|
|
||||||
All Rights Reserved
|
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License"); you
|
|
||||||
may not use this file except in compliance with the License. You may
|
|
||||||
obtain a copy of the License at
|
|
||||||
|
|
||||||
http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
|
|
||||||
Unless required by applicable law or agreed to in writing, software
|
|
||||||
distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
|
||||||
implied. See the License for the specific language governing permissions
|
|
||||||
and limitations under the License.
|
|
||||||
|
|
||||||
List of contributors to the Rx libraries
|
|
||||||
|
|
||||||
Rx and Ix.NET:
|
|
||||||
Wes Dyer
|
|
||||||
Jeffrey van Gogh
|
|
||||||
Matthew Podwysocki
|
|
||||||
Bart De Smet
|
|
||||||
Danny van Velzen
|
|
||||||
Erik Meijer
|
|
||||||
Brian Beckman
|
|
||||||
Aaron Lahman
|
|
||||||
Georgi Chkodrov
|
|
||||||
Arthur Watson
|
|
||||||
Gert Drapers
|
|
||||||
Mark Shields
|
|
||||||
Eric Rozell
|
|
||||||
|
|
||||||
Rx.js and Ix.js:
|
|
||||||
Matthew Podwysocki
|
|
||||||
Jeffrey van Gogh
|
|
||||||
Bart De Smet
|
|
||||||
Brian Beckman
|
|
||||||
Wes Dyer
|
|
||||||
Erik Meijer
|
|
||||||
|
|
||||||
Tx:
|
|
||||||
Georgi Chkodrov
|
|
||||||
Bart De Smet
|
|
||||||
Aaron Lahman
|
|
||||||
Erik Meijer
|
|
||||||
Brian Grunkemeyer
|
|
||||||
Beysim Sezgin
|
|
||||||
Tiho Tarnavski
|
|
||||||
Collin Meek
|
|
||||||
Sajay Anthony
|
|
||||||
Karen Albrecht
|
|
||||||
John Allen
|
|
||||||
Zach Kramer
|
|
||||||
|
|
||||||
Rx++ and Ix++:
|
|
||||||
Aaron Lahman
|
|
||||||
===================================Reactive Extensions (END)
|
|
||||||
|
|
||||||
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
|
|
||||||
=========================================
|
|
||||||
END OF Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION
|
|
||||||
|
|
||||||
%% Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION BEGIN HERE
|
|
||||||
=========================================
|
|
||||||
NOTICES AND INFORMATION
|
|
||||||
Do Not Translate or Localize
|
|
||||||
|
|
||||||
This software incorporates material from third parties. Microsoft makes certain
|
|
||||||
open source code available at http://3rdpartysource.microsoft.com, or you may
|
|
||||||
send a check or money order for US $5.00, including the product name, the open
|
|
||||||
source component name, and version number, to:
|
|
||||||
|
|
||||||
Source Code Compliance Team
|
|
||||||
Microsoft Corporation
|
|
||||||
One Microsoft Way
|
|
||||||
Redmond, WA 98052
|
|
||||||
USA
|
|
||||||
|
|
||||||
Notwithstanding any other terms, you may reverse engineer this software to the
|
|
||||||
extent required to debug changes to any libraries licensed under the GNU Lesser
|
|
||||||
General Public License.
|
|
||||||
|
|
||||||
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
|
|
||||||
|
|
||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2014 ExcelDataReader
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
|
||||||
in the Software without restriction, including without limitation the rights
|
|
||||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
||||||
copies of the Software, and to permit persons to whom the Software is
|
|
||||||
furnished to do so, subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
||||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
||||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
||||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
||||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
||||||
SOFTWARE.
|
|
||||||
===================================ExcelDataReader (END)
|
|
||||||
|
|
||||||
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
|
|
||||||
=========================================
|
|
||||||
END OF Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION
|
|
||||||
|
|||||||
19
appveyor.yml
Normal file
19
appveyor.yml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
environment:
|
||||||
|
ELECTRON_RUN_AS_NODE: 1
|
||||||
|
VSCODE_BUILD_VERBOSE: true
|
||||||
|
|
||||||
|
cache:
|
||||||
|
- '%LOCALAPPDATA%\Yarn\cache'
|
||||||
|
|
||||||
|
install:
|
||||||
|
- ps: Install-Product node 8.9.1 x64
|
||||||
|
|
||||||
|
build_script:
|
||||||
|
- yarn
|
||||||
|
- .\node_modules\.bin\gulp electron
|
||||||
|
- npm run compile
|
||||||
|
|
||||||
|
test_script:
|
||||||
|
- node --version
|
||||||
|
- .\scripts\test.bat
|
||||||
|
- .\scripts\test-integration.bat
|
||||||
@@ -1,66 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: '10.15.1'
|
|
||||||
displayName: 'Install Node.js'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
npm i -g yarn
|
|
||||||
displayName: 'preinstall'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
|
|
||||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
|
||||||
sudo chmod +x /etc/init.d/xvfb
|
|
||||||
sudo update-rc.d xvfb defaults
|
|
||||||
sudo service xvfb start
|
|
||||||
# sh -e /etc/init.d/xvfb start
|
|
||||||
# sleep 3
|
|
||||||
displayName: 'Linux preinstall'
|
|
||||||
condition: eq(variables['Agent.OS'], 'Linux')
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn
|
|
||||||
displayName: 'Install'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp electron-x64
|
|
||||||
displayName: Download Electron
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn tslint
|
|
||||||
displayName: 'Run TSLint'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn strict-null-check
|
|
||||||
displayName: 'Run Strict Null Check'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn compile
|
|
||||||
displayName: 'Compile'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
|
|
||||||
displayName: 'Tests'
|
|
||||||
condition: eq(variables['Agent.OS'], 'Linux')
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
|
|
||||||
displayName: 'Tests'
|
|
||||||
condition: ne(variables['Agent.OS'], 'Linux')
|
|
||||||
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: '**/test-results.xml'
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
|
|
||||||
- task: PublishCodeCoverageResults@1
|
|
||||||
inputs:
|
|
||||||
codeCoverageTool: 'cobertura'
|
|
||||||
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
|
|
||||||
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
|
|
||||||
condition: ne(variables['Agent.OS'], 'Linux')
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: '10.15.1'
|
|
||||||
displayName: 'Install Node.js'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn
|
|
||||||
displayName: 'Yarn Install'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp electron-x64
|
|
||||||
displayName: 'Electron'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn tslint
|
|
||||||
displayName: 'Run TSLint'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn strict-null-check
|
|
||||||
displayName: 'Run Strict Null Check'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn compile
|
|
||||||
displayName: 'Compile'
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
|
|
||||||
displayName: 'Test'
|
|
||||||
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: 'test-results.xml'
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
|
|
||||||
- task: PublishCodeCoverageResults@1
|
|
||||||
inputs:
|
|
||||||
codeCoverageTool: 'cobertura'
|
|
||||||
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
|
|
||||||
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
trigger:
|
|
||||||
- master
|
|
||||||
- releases/*
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
# All tasks on Windows
|
|
||||||
- job: build_all_windows
|
|
||||||
displayName: Build all tasks (Windows)
|
|
||||||
pool:
|
|
||||||
vmImage: vs2017-win2016
|
|
||||||
steps:
|
|
||||||
- template: azure-pipelines-windows.yml
|
|
||||||
|
|
||||||
# All tasks on Linux
|
|
||||||
- job: build_all_linux
|
|
||||||
displayName: Build all tasks (Linux)
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu 16.04'
|
|
||||||
steps:
|
|
||||||
- template: azure-pipelines-linux-mac.yml
|
|
||||||
|
|
||||||
# All tasks on macOS
|
|
||||||
- job: build_all_darwin
|
|
||||||
displayName: Build all tasks (macOS)
|
|
||||||
pool:
|
|
||||||
vmImage: macos-10.13
|
|
||||||
steps:
|
|
||||||
- template: azure-pipelines-linux-mac.yml
|
|
||||||
@@ -1,126 +0,0 @@
|
|||||||
# cleanup rules for native node modules, .gitignore style
|
|
||||||
|
|
||||||
fsevents/binding.gyp
|
|
||||||
fsevents/fsevents.cc
|
|
||||||
fsevents/build/**
|
|
||||||
fsevents/src/**
|
|
||||||
fsevents/test/**
|
|
||||||
!fsevents/**/*.node
|
|
||||||
|
|
||||||
vscode-sqlite3/binding.gyp
|
|
||||||
vscode-sqlite3/benchmark/**
|
|
||||||
vscode-sqlite3/cloudformation/**
|
|
||||||
vscode-sqlite3/deps/**
|
|
||||||
vscode-sqlite3/test/**
|
|
||||||
vscode-sqlite3/build/**
|
|
||||||
vscode-sqlite3/src/**
|
|
||||||
!vscode-sqlite3/build/Release/*.node
|
|
||||||
|
|
||||||
oniguruma/binding.gyp
|
|
||||||
oniguruma/build/**
|
|
||||||
oniguruma/src/**
|
|
||||||
oniguruma/deps/**
|
|
||||||
!oniguruma/build/Release/*.node
|
|
||||||
!oniguruma/src/*.js
|
|
||||||
|
|
||||||
windows-mutex/binding.gyp
|
|
||||||
windows-mutex/build/**
|
|
||||||
windows-mutex/src/**
|
|
||||||
!windows-mutex/**/*.node
|
|
||||||
|
|
||||||
native-keymap/binding.gyp
|
|
||||||
native-keymap/build/**
|
|
||||||
native-keymap/src/**
|
|
||||||
native-keymap/deps/**
|
|
||||||
!native-keymap/build/Release/*.node
|
|
||||||
|
|
||||||
native-is-elevated/binding.gyp
|
|
||||||
native-is-elevated/build/**
|
|
||||||
native-is-elevated/src/**
|
|
||||||
native-is-elevated/deps/**
|
|
||||||
!native-is-elevated/build/Release/*.node
|
|
||||||
|
|
||||||
native-watchdog/binding.gyp
|
|
||||||
native-watchdog/build/**
|
|
||||||
native-watchdog/src/**
|
|
||||||
!native-watchdog/build/Release/*.node
|
|
||||||
|
|
||||||
spdlog/binding.gyp
|
|
||||||
spdlog/build/**
|
|
||||||
spdlog/deps/**
|
|
||||||
spdlog/src/**
|
|
||||||
spdlog/test/**
|
|
||||||
!spdlog/build/Release/*.node
|
|
||||||
|
|
||||||
jschardet/dist/**
|
|
||||||
|
|
||||||
windows-foreground-love/binding.gyp
|
|
||||||
windows-foreground-love/build/**
|
|
||||||
windows-foreground-love/src/**
|
|
||||||
!windows-foreground-love/**/*.node
|
|
||||||
|
|
||||||
windows-process-tree/binding.gyp
|
|
||||||
windows-process-tree/build/**
|
|
||||||
windows-process-tree/src/**
|
|
||||||
!windows-process-tree/**/*.node
|
|
||||||
|
|
||||||
gc-signals/binding.gyp
|
|
||||||
gc-signals/build/**
|
|
||||||
gc-signals/src/**
|
|
||||||
gc-signals/deps/**
|
|
||||||
|
|
||||||
!gc-signals/build/Release/*.node
|
|
||||||
!gc-signals/src/index.js
|
|
||||||
|
|
||||||
keytar/binding.gyp
|
|
||||||
keytar/build/**
|
|
||||||
keytar/src/**
|
|
||||||
keytar/script/**
|
|
||||||
keytar/node_modules/**
|
|
||||||
!keytar/**/*.node
|
|
||||||
|
|
||||||
node-pty/binding.gyp
|
|
||||||
node-pty/build/**
|
|
||||||
node-pty/src/**
|
|
||||||
node-pty/tools/**
|
|
||||||
!node-pty/build/Release/*.exe
|
|
||||||
!node-pty/build/Release/*.dll
|
|
||||||
!node-pty/build/Release/*.node
|
|
||||||
|
|
||||||
chart.js/node_modules/**
|
|
||||||
|
|
||||||
emmet/node_modules/**
|
|
||||||
|
|
||||||
pty.js/build/**
|
|
||||||
!pty.js/build/Release/**
|
|
||||||
|
|
||||||
jquery-ui/external/**
|
|
||||||
jquery-ui/demos/**
|
|
||||||
|
|
||||||
core-js/**/**
|
|
||||||
|
|
||||||
slickgrid/node_modules/**
|
|
||||||
slickgrid/examples/**
|
|
||||||
|
|
||||||
vscode-nsfw/binding.gyp
|
|
||||||
vscode-nsfw/build/**
|
|
||||||
vscode-nsfw/src/**
|
|
||||||
vscode-nsfw/openpa/**
|
|
||||||
vscode-nsfw/includes/**
|
|
||||||
!vscode-nsfw/build/Release/*.node
|
|
||||||
!vscode-nsfw/**/*.a
|
|
||||||
|
|
||||||
vsda/binding.gyp
|
|
||||||
vsda/README.md
|
|
||||||
vsda/build/**
|
|
||||||
vsda/*.bat
|
|
||||||
vsda/*.sh
|
|
||||||
vsda/*.cpp
|
|
||||||
vsda/*.h
|
|
||||||
!vsda/build/Release/vsda.node
|
|
||||||
|
|
||||||
vscode-windows-ca-certs/**/*
|
|
||||||
!vscode-windows-ca-certs/package.json
|
|
||||||
!vscode-windows-ca-certs/**/*.node
|
|
||||||
|
|
||||||
node-addon-api/**/*
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
import * as cp from 'child_process';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
function yarnInstall(packageName: string): void {
|
|
||||||
cp.execSync(`yarn add --no-lockfile ${packageName}`);
|
|
||||||
cp.execSync(`yarn add --no-lockfile ${packageName}`, { cwd: path.join( process.cwd(), 'remote') });
|
|
||||||
}
|
|
||||||
|
|
||||||
const product = require('../../../product.json');
|
|
||||||
const dependencies = product.dependencies || {} as { [name: string]: string; };
|
|
||||||
|
|
||||||
Object.keys(dependencies).forEach(name => {
|
|
||||||
const url = dependencies[name];
|
|
||||||
yarnInstall(url);
|
|
||||||
});
|
|
||||||
@@ -1,176 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
import * as url from 'url';
|
|
||||||
import * as azure from 'azure-storage';
|
|
||||||
import * as mime from 'mime';
|
|
||||||
import { DocumentClient, RetrievedDocument } from 'documentdb';
|
|
||||||
|
|
||||||
function log(...args: any[]) {
|
|
||||||
console.log(...[`[${new Date().toISOString()}]`, ...args]);
|
|
||||||
}
|
|
||||||
|
|
||||||
function error(...args: any[]) {
|
|
||||||
console.error(...[`[${new Date().toISOString()}]`, ...args]);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (process.argv.length < 3) {
|
|
||||||
error('Usage: node sync-mooncake.js <quality>');
|
|
||||||
process.exit(-1);
|
|
||||||
}
|
|
||||||
|
|
||||||
interface Build extends RetrievedDocument {
|
|
||||||
assets: Asset[];
|
|
||||||
}
|
|
||||||
|
|
||||||
interface Asset {
|
|
||||||
platform: string;
|
|
||||||
type: string;
|
|
||||||
url: string;
|
|
||||||
mooncakeUrl: string;
|
|
||||||
hash: string;
|
|
||||||
sha256hash: string;
|
|
||||||
size: number;
|
|
||||||
supportsFastUpdate?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
|
|
||||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
|
||||||
const collection = 'dbs/builds/colls/' + quality;
|
|
||||||
const updateQuery = {
|
|
||||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
|
||||||
parameters: [{ name: '@id', value: commit }]
|
|
||||||
};
|
|
||||||
|
|
||||||
let updateTries = 0;
|
|
||||||
|
|
||||||
function _update(): Promise<void> {
|
|
||||||
updateTries++;
|
|
||||||
|
|
||||||
return new Promise<void>((c, e) => {
|
|
||||||
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
|
|
||||||
if (err) { return e(err); }
|
|
||||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
|
||||||
|
|
||||||
const release = results[0];
|
|
||||||
|
|
||||||
release.assets = [
|
|
||||||
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
|
|
||||||
asset
|
|
||||||
];
|
|
||||||
|
|
||||||
client.replaceDocument(release._self, release, err => {
|
|
||||||
if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
|
|
||||||
if (err) { return e(err); }
|
|
||||||
|
|
||||||
log('Build successfully updated.');
|
|
||||||
c();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return _update();
|
|
||||||
}
|
|
||||||
|
|
||||||
async function sync(commit: string, quality: string): Promise<void> {
|
|
||||||
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
|
|
||||||
|
|
||||||
const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
|
||||||
const collection = `dbs/builds/colls/${quality}`;
|
|
||||||
const query = {
|
|
||||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
|
||||||
parameters: [{ name: '@id', value: commit }]
|
|
||||||
};
|
|
||||||
|
|
||||||
const build = await new Promise<Build>((c, e) => {
|
|
||||||
cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
|
|
||||||
if (err) { return e(err); }
|
|
||||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
|
||||||
c(results[0] as Build);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
log(`Found build for ${commit}, with ${build.assets.length} assets`);
|
|
||||||
|
|
||||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
|
|
||||||
|
|
||||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
|
|
||||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
|
||||||
|
|
||||||
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
|
|
||||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
|
||||||
|
|
||||||
// mooncake is fussy and far away, this is needed!
|
|
||||||
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
|
||||||
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
|
||||||
|
|
||||||
for (const asset of build.assets) {
|
|
||||||
try {
|
|
||||||
const blobPath = url.parse(asset.url).path;
|
|
||||||
|
|
||||||
if (!blobPath) {
|
|
||||||
throw new Error(`Failed to parse URL: ${asset.url}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const blobName = blobPath.replace(/^\/\w+\//, '');
|
|
||||||
|
|
||||||
log(`Found ${blobName}`);
|
|
||||||
|
|
||||||
if (asset.mooncakeUrl) {
|
|
||||||
log(` Already in Mooncake ✔️`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const readStream = blobService.createReadStream(quality, blobName, undefined!);
|
|
||||||
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
|
||||||
contentSettings: {
|
|
||||||
contentType: mime.lookup(blobPath),
|
|
||||||
cacheControl: 'max-age=31536000, public'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
|
|
||||||
|
|
||||||
log(` Uploading to Mooncake...`);
|
|
||||||
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
|
||||||
|
|
||||||
log(` Updating build in DB...`);
|
|
||||||
asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
|
||||||
await updateBuild(commit, quality, asset.platform, asset.type, asset);
|
|
||||||
|
|
||||||
log(` Done ✔️`);
|
|
||||||
} catch (err) {
|
|
||||||
error(err);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
log(`All done ✔️`);
|
|
||||||
}
|
|
||||||
|
|
||||||
function main(): void {
|
|
||||||
if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
|
|
||||||
error('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const commit = process.env['BUILD_SOURCEVERSION'];
|
|
||||||
|
|
||||||
if (!commit) {
|
|
||||||
error('Skipping publish due to missing BUILD_SOURCEVERSION');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const quality = process.argv[2];
|
|
||||||
|
|
||||||
sync(commit, quality).catch(err => {
|
|
||||||
error(err);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
main();
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -e
|
|
||||||
yarn gulp vscode-darwin-min
|
|
||||||
yarn gulp vscode-reh-darwin-min
|
|
||||||
yarn gulp upload-vscode-sourcemaps
|
|
||||||
@@ -1,50 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
|
||||||
# inputs:
|
|
||||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
|
||||||
# vstsFeed: '$(ArtifactFeed)'
|
|
||||||
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
|
|
||||||
- script: |
|
|
||||||
yarn
|
|
||||||
displayName: Install Dependencies
|
|
||||||
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
|
||||||
# inputs:
|
|
||||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
|
||||||
# vstsFeed: '$(ArtifactFeed)'
|
|
||||||
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
|
||||||
- script: |
|
|
||||||
yarn gulp electron-x64
|
|
||||||
displayName: Download Electron
|
|
||||||
- script: |
|
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
|
||||||
- script: |
|
|
||||||
yarn monaco-compile-check
|
|
||||||
displayName: Run Monaco Editor Checks
|
|
||||||
- script: |
|
|
||||||
yarn compile
|
|
||||||
displayName: Compile Sources
|
|
||||||
- script: |
|
|
||||||
yarn download-builtin-extensions
|
|
||||||
displayName: Download Built-in Extensions
|
|
||||||
- script: |
|
|
||||||
./scripts/test.sh --tfs "Unit Tests"
|
|
||||||
displayName: Run Unit Tests
|
|
||||||
- script: |
|
|
||||||
./scripts/test-integration.sh --tfs "Integration Tests"
|
|
||||||
displayName: Run Integration Tests
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
displayName: Publish Tests Results
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: '*-results.xml'
|
|
||||||
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
@@ -1,96 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
|
||||||
inputs:
|
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
|
||||||
KeyVaultName: vscode
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
|
|
||||||
cat << EOF > ~/.netrc
|
|
||||||
machine monacotools.visualstudio.com
|
|
||||||
password $(devops-pat)
|
|
||||||
machine github.com
|
|
||||||
login vscode
|
|
||||||
password $(github-distro-mixin-password)
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git config user.email "vscode@microsoft.com"
|
|
||||||
git config user.name "VSCode"
|
|
||||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
|
||||||
git fetch distro
|
|
||||||
git merge $(node -p "require('./package.json').distro")
|
|
||||||
|
|
||||||
yarn
|
|
||||||
yarn gulp mixin
|
|
||||||
yarn gulp hygiene
|
|
||||||
yarn monaco-compile-check
|
|
||||||
node build/azure-pipelines/common/installDistro.js
|
|
||||||
node build/lib/builtInExtensions.js
|
|
||||||
displayName: Prepare build
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
|
||||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
|
||||||
./build/azure-pipelines/darwin/build.sh
|
|
||||||
displayName: Build
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
./scripts/test.sh --build --tfs "Unit Tests"
|
|
||||||
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
|
|
||||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
|
|
||||||
displayName: Run unit tests
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
|
||||||
displayName: Run integration tests
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
|
|
||||||
displayName: Archive build
|
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
|
||||||
inputs:
|
|
||||||
ConnectedServiceName: 'ESRP CodeSign'
|
|
||||||
FolderPath: '$(agent.builddirectory)'
|
|
||||||
Pattern: 'VSCode-darwin.zip'
|
|
||||||
signConfigType: inlineSignParams
|
|
||||||
inlineOperation: |
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"keyCode": "CP-401337-Apple",
|
|
||||||
"operationSetCode": "MacAppDeveloperSign",
|
|
||||||
"parameters": [ ],
|
|
||||||
"toolName": "sign",
|
|
||||||
"toolVersion": "1.0"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
SessionTimeout: 120
|
|
||||||
displayName: Codesign
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
|
||||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
|
||||||
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
|
|
||||||
./build/azure-pipelines/darwin/publish.sh
|
|
||||||
displayName: Publish
|
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
|
||||||
displayName: 'Component Detection'
|
|
||||||
continueOnError: true
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# remove pkg from archive
|
|
||||||
zip -d ../VSCode-darwin.zip "*.pkg"
|
|
||||||
|
|
||||||
# publish the build
|
|
||||||
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
|
|
||||||
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
|
|
||||||
node build/azure-pipelines/common/publish.js \
|
|
||||||
"$VSCODE_QUALITY" \
|
|
||||||
darwin \
|
|
||||||
archive \
|
|
||||||
"VSCode-darwin-$VSCODE_QUALITY.zip" \
|
|
||||||
$VERSION \
|
|
||||||
true \
|
|
||||||
../VSCode-darwin.zip
|
|
||||||
|
|
||||||
# package Remote Extension Host
|
|
||||||
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
|
|
||||||
|
|
||||||
# publish Remote Extension Host
|
|
||||||
node build/azure-pipelines/common/publish.js \
|
|
||||||
"$VSCODE_QUALITY" \
|
|
||||||
server-darwin \
|
|
||||||
archive-unsigned \
|
|
||||||
"vscode-server-darwin.zip" \
|
|
||||||
$VERSION \
|
|
||||||
true \
|
|
||||||
../vscode-server-darwin.zip
|
|
||||||
|
|
||||||
# publish hockeyapp symbols
|
|
||||||
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_MACOS"
|
|
||||||
|
|
||||||
# upload configuration
|
|
||||||
yarn gulp upload-vscode-configuration
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
trigger:
|
|
||||||
branches:
|
|
||||||
include: ['master', 'release/*']
|
|
||||||
pr:
|
|
||||||
branches:
|
|
||||||
include: ['master', 'release/*']
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
|
||||||
inputs:
|
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
|
||||||
KeyVaultName: vscode
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
|
|
||||||
cat << EOF > ~/.netrc
|
|
||||||
machine github.com
|
|
||||||
login vscode
|
|
||||||
password $(github-distro-mixin-password)
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git config user.email "vscode@microsoft.com"
|
|
||||||
git config user.name "VSCode"
|
|
||||||
|
|
||||||
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
|
|
||||||
git fetch distro
|
|
||||||
git push distro origin/master:refs/heads/master
|
|
||||||
git merge $(node -p "require('./package.json').distro")
|
|
||||||
|
|
||||||
displayName: Sync & Merge Distro
|
|
||||||
1
build/azure-pipelines/linux/.gitignore
vendored
1
build/azure-pipelines/linux/.gitignore
vendored
@@ -1 +0,0 @@
|
|||||||
pat
|
|
||||||
@@ -1,7 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -e
|
|
||||||
yarn gulp "vscode-linux-$VSCODE_ARCH-min"
|
|
||||||
|
|
||||||
if [[ "$VSCODE_ARCH" != "ia32" ]]; then
|
|
||||||
yarn gulp vscode-reh-linux-$VSCODE_ARCH-min
|
|
||||||
fi
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
steps:
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
|
|
||||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
|
||||||
sudo chmod +x /etc/init.d/xvfb
|
|
||||||
sudo update-rc.d xvfb defaults
|
|
||||||
sudo service xvfb start
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
|
||||||
# inputs:
|
|
||||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
|
||||||
# vstsFeed: '$(ArtifactFeed)'
|
|
||||||
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
|
|
||||||
- script: |
|
|
||||||
yarn
|
|
||||||
displayName: Install Dependencies
|
|
||||||
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
|
||||||
# inputs:
|
|
||||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
|
||||||
# vstsFeed: '$(ArtifactFeed)'
|
|
||||||
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
|
||||||
- script: |
|
|
||||||
yarn gulp electron-x64
|
|
||||||
displayName: Download Electron
|
|
||||||
- script: |
|
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
|
||||||
- script: |
|
|
||||||
yarn monaco-compile-check
|
|
||||||
displayName: Run Monaco Editor Checks
|
|
||||||
- script: |
|
|
||||||
yarn compile
|
|
||||||
displayName: Compile Sources
|
|
||||||
- script: |
|
|
||||||
yarn download-builtin-extensions
|
|
||||||
displayName: Download Built-in Extensions
|
|
||||||
- script: |
|
|
||||||
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
|
||||||
displayName: Run Unit Tests
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
displayName: Publish Tests Results
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: '*-results.xml'
|
|
||||||
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
'use strict';
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const documentdb_1 = require("documentdb");
|
|
||||||
function createDefaultConfig(quality) {
|
|
||||||
return {
|
|
||||||
id: quality,
|
|
||||||
frozen: false
|
|
||||||
};
|
|
||||||
}
|
|
||||||
function getConfig(quality) {
|
|
||||||
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
|
||||||
const collection = 'dbs/builds/colls/config';
|
|
||||||
const query = {
|
|
||||||
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
|
|
||||||
parameters: [
|
|
||||||
{ name: '@quality', value: quality }
|
|
||||||
]
|
|
||||||
};
|
|
||||||
return new Promise((c, e) => {
|
|
||||||
client.queryDocuments(collection, query).toArray((err, results) => {
|
|
||||||
if (err && err.code !== 409) {
|
|
||||||
return e(err);
|
|
||||||
}
|
|
||||||
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
getConfig(process.argv[2])
|
|
||||||
.then(config => {
|
|
||||||
console.log(config.frozen);
|
|
||||||
process.exit(0);
|
|
||||||
})
|
|
||||||
.catch(err => {
|
|
||||||
console.error(err);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
@@ -1,79 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
|
||||||
inputs:
|
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
|
||||||
KeyVaultName: vscode
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
export npm_config_arch="$(VSCODE_ARCH)"
|
|
||||||
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
|
|
||||||
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
|
|
||||||
fi
|
|
||||||
|
|
||||||
cat << EOF > ~/.netrc
|
|
||||||
machine monacotools.visualstudio.com
|
|
||||||
password $(devops-pat)
|
|
||||||
machine github.com
|
|
||||||
login vscode
|
|
||||||
password $(github-distro-mixin-password)
|
|
||||||
EOF
|
|
||||||
|
|
||||||
git config user.email "vscode@microsoft.com"
|
|
||||||
git config user.name "VSCode"
|
|
||||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
|
||||||
git fetch distro
|
|
||||||
git merge $(node -p "require('./package.json').distro")
|
|
||||||
|
|
||||||
CHILD_CONCURRENCY=1 yarn
|
|
||||||
yarn gulp mixin
|
|
||||||
yarn gulp hygiene
|
|
||||||
yarn monaco-compile-check
|
|
||||||
node build/azure-pipelines/common/installDistro.js
|
|
||||||
node build/lib/builtInExtensions.js
|
|
||||||
displayName: Prepare build
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
|
||||||
./build/azure-pipelines/linux/build.sh
|
|
||||||
displayName: Build
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
yarn gulp "electron-$(VSCODE_ARCH)"
|
|
||||||
|
|
||||||
# xvfb seems to be crashing often, let's make sure it's always up
|
|
||||||
service xvfb start
|
|
||||||
|
|
||||||
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
|
||||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
|
|
||||||
displayName: Run unit tests
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
|
||||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
|
||||||
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
|
|
||||||
./build/azure-pipelines/linux/publish.sh
|
|
||||||
displayName: Publish
|
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
|
||||||
displayName: 'Component Detection'
|
|
||||||
continueOnError: true
|
|
||||||
|
|
||||||
- task: PublishPipelineArtifact@0
|
|
||||||
displayName: 'Publish Pipeline Artifact'
|
|
||||||
inputs:
|
|
||||||
artifactName: snap-$(VSCODE_ARCH)
|
|
||||||
targetPath: .build/linux/snap-tarball
|
|
||||||
@@ -1,64 +0,0 @@
|
|||||||
#!/usr/bin/env bash
|
|
||||||
set -e
|
|
||||||
REPO="$(pwd)"
|
|
||||||
ROOT="$REPO/.."
|
|
||||||
|
|
||||||
# Publish tarball
|
|
||||||
PLATFORM_LINUX="linux-$VSCODE_ARCH"
|
|
||||||
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
|
||||||
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
|
||||||
BUILDNAME="VSCode-$PLATFORM_LINUX"
|
|
||||||
BUILD="$ROOT/$BUILDNAME"
|
|
||||||
BUILD_VERSION="$(date +%s)"
|
|
||||||
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
|
||||||
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
|
||||||
PACKAGEJSON="$BUILD/resources/app/package.json"
|
|
||||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
|
||||||
|
|
||||||
rm -rf $ROOT/code-*.tar.*
|
|
||||||
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
|
|
||||||
|
|
||||||
# Publish Remote Extension Host
|
|
||||||
if [[ "$VSCODE_ARCH" != "ia32" ]]; then
|
|
||||||
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
|
|
||||||
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
|
|
||||||
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
|
|
||||||
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
|
|
||||||
|
|
||||||
rm -rf $ROOT/vscode-server-*.tar.*
|
|
||||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Publish hockeyapp symbols
|
|
||||||
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_LINUX64"
|
|
||||||
|
|
||||||
# Publish DEB
|
|
||||||
yarn gulp "vscode-linux-$VSCODE_ARCH-build-deb"
|
|
||||||
PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
|
|
||||||
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
|
||||||
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
|
||||||
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
|
|
||||||
|
|
||||||
# Publish RPM
|
|
||||||
yarn gulp "vscode-linux-$VSCODE_ARCH-build-rpm"
|
|
||||||
PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
|
|
||||||
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
|
||||||
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
|
||||||
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
|
|
||||||
|
|
||||||
# Publish Snap
|
|
||||||
yarn gulp "vscode-linux-$VSCODE_ARCH-prepare-snap"
|
|
||||||
|
|
||||||
# Pack snap tarball artifact, in order to preserve file perms
|
|
||||||
mkdir -p $REPO/.build/linux/snap-tarball
|
|
||||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
|
|
||||||
rm -rf $SNAP_TARBALL_PATH
|
|
||||||
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
|
|
||||||
@@ -1,55 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
|
||||||
inputs:
|
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
|
||||||
KeyVaultName: vscode
|
|
||||||
|
|
||||||
- task: DownloadPipelineArtifact@0
|
|
||||||
displayName: 'Download Pipeline Artifact'
|
|
||||||
inputs:
|
|
||||||
artifactName: snap-$(VSCODE_ARCH)
|
|
||||||
targetPath: .build/linux/snap-tarball
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
|
|
||||||
# Get snapcraft version
|
|
||||||
snapcraft --version
|
|
||||||
|
|
||||||
# Make sure we get latest packages
|
|
||||||
sudo apt-get update
|
|
||||||
sudo apt-get upgrade -y
|
|
||||||
|
|
||||||
# Define variables
|
|
||||||
REPO="$(pwd)"
|
|
||||||
ARCH="$(VSCODE_ARCH)"
|
|
||||||
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"
|
|
||||||
|
|
||||||
# Install build dependencies
|
|
||||||
(cd build && yarn)
|
|
||||||
|
|
||||||
# Unpack snap tarball artifact, in order to preserve file perms
|
|
||||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
|
|
||||||
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
|
|
||||||
|
|
||||||
# Create snap package
|
|
||||||
BUILD_VERSION="$(date +%s)"
|
|
||||||
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
|
|
||||||
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
|
|
||||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
|
||||||
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
|
|
||||||
(cd $SNAP_ROOT/code-* && sudo snapcraft snap --output "$SNAP_PATH")
|
|
||||||
|
|
||||||
# Publish snap package
|
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
|
|
||||||
@@ -1,81 +0,0 @@
|
|||||||
resources:
|
|
||||||
containers:
|
|
||||||
- container: vscode-x64
|
|
||||||
endpoint: VSCodeHub
|
|
||||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
|
|
||||||
- container: vscode-ia32
|
|
||||||
endpoint: VSCodeHub
|
|
||||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:ia32
|
|
||||||
- container: snapcraft
|
|
||||||
image: snapcore/snapcraft
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
- job: Windows
|
|
||||||
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
|
|
||||||
pool:
|
|
||||||
vmImage: VS2017-Win2016
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: x64
|
|
||||||
steps:
|
|
||||||
- template: win32/product-build-win32.yml
|
|
||||||
|
|
||||||
- job: Windows32
|
|
||||||
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
|
|
||||||
pool:
|
|
||||||
vmImage: VS2017-Win2016
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: ia32
|
|
||||||
steps:
|
|
||||||
- template: win32/product-build-win32.yml
|
|
||||||
|
|
||||||
- job: Linux
|
|
||||||
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: x64
|
|
||||||
container: vscode-x64
|
|
||||||
steps:
|
|
||||||
- template: linux/product-build-linux.yml
|
|
||||||
|
|
||||||
- job: LinuxSnap
|
|
||||||
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: x64
|
|
||||||
container: snapcraft
|
|
||||||
dependsOn: Linux
|
|
||||||
steps:
|
|
||||||
- template: linux/snap-build-linux.yml
|
|
||||||
|
|
||||||
- job: Linux32
|
|
||||||
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
variables:
|
|
||||||
VSCODE_ARCH: ia32
|
|
||||||
container: vscode-ia32
|
|
||||||
steps:
|
|
||||||
- template: linux/product-build-linux.yml
|
|
||||||
|
|
||||||
- job: macOS
|
|
||||||
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
|
|
||||||
pool:
|
|
||||||
vmImage: macOS 10.13
|
|
||||||
steps:
|
|
||||||
- template: darwin/product-build-darwin.yml
|
|
||||||
|
|
||||||
- job: Mooncake
|
|
||||||
pool:
|
|
||||||
vmImage: 'Ubuntu-16.04'
|
|
||||||
condition: true
|
|
||||||
dependsOn:
|
|
||||||
- Windows
|
|
||||||
- Windows32
|
|
||||||
- Linux
|
|
||||||
- LinuxSnap
|
|
||||||
- Linux32
|
|
||||||
- macOS
|
|
||||||
steps:
|
|
||||||
- template: sync-mooncake.yml
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
node_modules/
|
|
||||||
*.js
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
'use strict';
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const cp = require("child_process");
|
|
||||||
let tag = '';
|
|
||||||
try {
|
|
||||||
tag = cp
|
|
||||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
|
||||||
.toString()
|
|
||||||
.trim();
|
|
||||||
if (!isValidTag(tag)) {
|
|
||||||
throw Error(`Invalid tag ${tag}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
console.error(err);
|
|
||||||
console.error('Failed to update types');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
function isValidTag(t) {
|
|
||||||
if (t.split('.').length !== 3) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
const [major, minor, bug] = t.split('.');
|
|
||||||
// Only release for tags like 1.34.0
|
|
||||||
if (bug !== '0') {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
import * as cp from 'child_process';
|
|
||||||
|
|
||||||
let tag = '';
|
|
||||||
try {
|
|
||||||
tag = cp
|
|
||||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
|
||||||
.toString()
|
|
||||||
.trim();
|
|
||||||
|
|
||||||
if (!isValidTag(tag)) {
|
|
||||||
throw Error(`Invalid tag ${tag}`);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
console.error(err);
|
|
||||||
console.error('Failed to update types');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
function isValidTag(t: string) {
|
|
||||||
if (t.split('.').length !== 3) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
const [major, minor, bug] = t.split('.');
|
|
||||||
|
|
||||||
// Only release for tags like 1.34.0
|
|
||||||
if (bug !== '0') {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
# Publish @types/vscode for each release
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
branches:
|
|
||||||
include: ['refs/tags/*']
|
|
||||||
|
|
||||||
pr: none
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- bash: |
|
|
||||||
# Install build dependencies
|
|
||||||
(cd build && yarn)
|
|
||||||
node build/azure-pipelines/publish-types/check-version.js
|
|
||||||
displayName: Check version
|
|
||||||
|
|
||||||
- bash: |
|
|
||||||
git config --global user.email "vscode@microsoft.com"
|
|
||||||
git config --global user.name "VSCode"
|
|
||||||
|
|
||||||
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
|
|
||||||
node build/azure-pipelines/publish-types/update-types.js
|
|
||||||
|
|
||||||
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
|
||||||
|
|
||||||
cd DefinitelyTyped
|
|
||||||
|
|
||||||
git diff --color | cat
|
|
||||||
git add -A
|
|
||||||
git status
|
|
||||||
git checkout -b "vscode-types-$TAG_VERSION"
|
|
||||||
git commit -m "VS Code $TAG_VERSION Extension API"
|
|
||||||
git push origin "vscode-types-$TAG_VERSION"
|
|
||||||
|
|
||||||
displayName: Push update to DefinitelyTyped
|
|
||||||
|
|
||||||
- bash: |
|
|
||||||
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
|
||||||
CHANNEL="G1C14HJ2F"
|
|
||||||
|
|
||||||
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
|
|
||||||
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
|
|
||||||
MESSAGE2="[@octref, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
|
|
||||||
|
|
||||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
|
||||||
-H 'Content-type: application/json; charset=utf-8' \
|
|
||||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
|
|
||||||
https://slack.com/api/chat.postMessage
|
|
||||||
|
|
||||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
|
||||||
-H 'Content-type: application/json; charset=utf-8' \
|
|
||||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
|
|
||||||
https://slack.com/api/chat.postMessage
|
|
||||||
|
|
||||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
|
||||||
-H 'Content-type: application/json; charset=utf-8' \
|
|
||||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
|
|
||||||
https://slack.com/api/chat.postMessage
|
|
||||||
|
|
||||||
displayName: Send message on Slack
|
|
||||||
@@ -1,62 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
'use strict';
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = require("fs");
|
|
||||||
const cp = require("child_process");
|
|
||||||
const path = require("path");
|
|
||||||
let tag = '';
|
|
||||||
try {
|
|
||||||
tag = cp
|
|
||||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
|
||||||
.toString()
|
|
||||||
.trim();
|
|
||||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
|
||||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
|
||||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
|
||||||
updateDTSFile(outPath, tag);
|
|
||||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
console.error(err);
|
|
||||||
console.error('Failed to update types');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
function updateDTSFile(outPath, tag) {
|
|
||||||
const oldContent = fs.readFileSync(outPath, 'utf-8');
|
|
||||||
const newContent = getNewFileContent(oldContent, tag);
|
|
||||||
fs.writeFileSync(outPath, newContent);
|
|
||||||
}
|
|
||||||
function getNewFileContent(content, tag) {
|
|
||||||
const oldheader = [
|
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
|
||||||
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
|
|
||||||
` *--------------------------------------------------------------------------------------------*/`
|
|
||||||
].join('\n');
|
|
||||||
return getNewFileHeader(tag) + content.slice(oldheader.length);
|
|
||||||
}
|
|
||||||
function getNewFileHeader(tag) {
|
|
||||||
const [major, minor] = tag.split('.');
|
|
||||||
const shorttag = `${major}.${minor}`;
|
|
||||||
const header = [
|
|
||||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
|
||||||
`// Project: https://github.com/microsoft/vscode`,
|
|
||||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
|
||||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
|
||||||
``,
|
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
|
||||||
` * Licensed under the Source EULA.`,
|
|
||||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
|
||||||
` *--------------------------------------------------------------------------------------------*/`,
|
|
||||||
``,
|
|
||||||
`/**`,
|
|
||||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
|
||||||
` * See https://code.visualstudio.com/api for more information`,
|
|
||||||
` */`
|
|
||||||
].join('\n');
|
|
||||||
return header;
|
|
||||||
}
|
|
||||||
@@ -1,73 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as cp from 'child_process';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
let tag = '';
|
|
||||||
try {
|
|
||||||
tag = cp
|
|
||||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
|
||||||
.toString()
|
|
||||||
.trim();
|
|
||||||
|
|
||||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
|
||||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
|
||||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
|
||||||
|
|
||||||
updateDTSFile(outPath, tag);
|
|
||||||
|
|
||||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
|
||||||
} catch (err) {
|
|
||||||
console.error(err);
|
|
||||||
console.error('Failed to update types');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
function updateDTSFile(outPath: string, tag: string) {
|
|
||||||
const oldContent = fs.readFileSync(outPath, 'utf-8');
|
|
||||||
const newContent = getNewFileContent(oldContent, tag);
|
|
||||||
|
|
||||||
fs.writeFileSync(outPath, newContent);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getNewFileContent(content: string, tag: string) {
|
|
||||||
const oldheader = [
|
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
|
||||||
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
|
|
||||||
` *--------------------------------------------------------------------------------------------*/`
|
|
||||||
].join('\n');
|
|
||||||
|
|
||||||
return getNewFileHeader(tag) + content.slice(oldheader.length);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getNewFileHeader(tag: string) {
|
|
||||||
const [major, minor] = tag.split('.');
|
|
||||||
const shorttag = `${major}.${minor}`;
|
|
||||||
|
|
||||||
const header = [
|
|
||||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
|
||||||
`// Project: https://github.com/microsoft/vscode`,
|
|
||||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
|
||||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
|
||||||
``,
|
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
|
||||||
` * Licensed under the Source EULA.`,
|
|
||||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
|
||||||
` *--------------------------------------------------------------------------------------------*/`,
|
|
||||||
``,
|
|
||||||
`/**`,
|
|
||||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
|
||||||
` * See https://code.visualstudio.com/api for more information`,
|
|
||||||
` */`
|
|
||||||
].join('\n');
|
|
||||||
|
|
||||||
return header;
|
|
||||||
}
|
|
||||||
@@ -1,24 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
|
||||||
inputs:
|
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
|
||||||
KeyVaultName: vscode
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
set -e
|
|
||||||
|
|
||||||
(cd build ; yarn)
|
|
||||||
|
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
|
||||||
MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
|
|
||||||
node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min" }
|
|
||||||
exec { yarn gulp "vscode-reh-win32-$env:VSCODE_ARCH-min" }
|
|
||||||
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }
|
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
- task: UsePythonVersion@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: '2.x'
|
|
||||||
addToPath: true
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
|
||||||
# inputs:
|
|
||||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
|
||||||
# vstsFeed: '$(ArtifactFeed)'
|
|
||||||
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
|
|
||||||
- powershell: |
|
|
||||||
yarn
|
|
||||||
displayName: Install Dependencies
|
|
||||||
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
|
||||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
|
||||||
# inputs:
|
|
||||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
|
||||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
|
||||||
# vstsFeed: '$(ArtifactFeed)'
|
|
||||||
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
|
||||||
- powershell: |
|
|
||||||
yarn gulp electron
|
|
||||||
displayName: Download Electron
|
|
||||||
- powershell: |
|
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
|
||||||
- powershell: |
|
|
||||||
yarn monaco-compile-check
|
|
||||||
displayName: Run Monaco Editor Checks
|
|
||||||
- powershell: |
|
|
||||||
yarn compile
|
|
||||||
displayName: Compile Sources
|
|
||||||
- powershell: |
|
|
||||||
yarn download-builtin-extensions
|
|
||||||
displayName: Download Built-in Extensions
|
|
||||||
- powershell: |
|
|
||||||
.\scripts\test.bat --tfs "Unit Tests"
|
|
||||||
displayName: Run Unit Tests
|
|
||||||
- powershell: |
|
|
||||||
.\scripts\test-integration.bat --tfs "Integration Tests"
|
|
||||||
displayName: Run Integration Tests
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
displayName: Publish Tests Results
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: '*-results.xml'
|
|
||||||
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
@@ -1,149 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- task: UsePythonVersion@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: '2.x'
|
|
||||||
addToPath: true
|
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
|
||||||
inputs:
|
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
|
||||||
KeyVaultName: vscode
|
|
||||||
|
|
||||||
- powershell: |
|
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
"machine monacotools.visualstudio.com`npassword $(devops-pat)`nmachine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
|
|
||||||
$env:npm_config_arch="$(VSCODE_ARCH)"
|
|
||||||
$env:CHILD_CONCURRENCY="1"
|
|
||||||
|
|
||||||
exec { git config user.email "vscode@microsoft.com" }
|
|
||||||
exec { git config user.name "VSCode" }
|
|
||||||
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
|
|
||||||
exec { git fetch distro }
|
|
||||||
exec { git merge $(node -p "require('./package.json').distro") }
|
|
||||||
|
|
||||||
exec { yarn }
|
|
||||||
exec { yarn gulp mixin }
|
|
||||||
exec { yarn gulp hygiene }
|
|
||||||
exec { yarn monaco-compile-check }
|
|
||||||
exec { node build/azure-pipelines/common/installDistro.js }
|
|
||||||
exec { node build/lib/builtInExtensions.js }
|
|
||||||
displayName: Prepare build
|
|
||||||
|
|
||||||
- powershell: |
|
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
|
|
||||||
.\build\azure-pipelines\win32\build.ps1
|
|
||||||
displayName: Build
|
|
||||||
|
|
||||||
- powershell: |
|
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
|
|
||||||
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
|
|
||||||
# yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
|
|
||||||
displayName: Run unit tests
|
|
||||||
|
|
||||||
- powershell: |
|
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
|
|
||||||
exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
|
||||||
displayName: Run integration tests
|
|
||||||
|
|
||||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
|
||||||
inputs:
|
|
||||||
ConnectedServiceName: 'ESRP CodeSign'
|
|
||||||
FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH),$(agent.builddirectory)/vscode-reh-win32-$(VSCODE_ARCH)'
|
|
||||||
Pattern: '*.dll,*.exe,*.node'
|
|
||||||
signConfigType: inlineSignParams
|
|
||||||
inlineOperation: |
|
|
||||||
[
|
|
||||||
{
|
|
||||||
"keyCode": "CP-230012",
|
|
||||||
"operationSetCode": "SigntoolSign",
|
|
||||||
"parameters": [
|
|
||||||
{
|
|
||||||
"parameterName": "OpusName",
|
|
||||||
"parameterValue": "VS Code"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameterName": "OpusInfo",
|
|
||||||
"parameterValue": "https://code.visualstudio.com/"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameterName": "Append",
|
|
||||||
"parameterValue": "/as"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameterName": "FileDigest",
|
|
||||||
"parameterValue": "/fd \"SHA256\""
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameterName": "PageHash",
|
|
||||||
"parameterValue": "/NPH"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"parameterName": "TimeStamp",
|
|
||||||
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"toolName": "sign",
|
|
||||||
"toolVersion": "1.0"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"keyCode": "CP-230012",
|
|
||||||
"operationSetCode": "SigntoolVerify",
|
|
||||||
"parameters": [
|
|
||||||
{
|
|
||||||
"parameterName": "VerifyAll",
|
|
||||||
"parameterValue": "/all"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"toolName": "sign",
|
|
||||||
"toolVersion": "1.0"
|
|
||||||
}
|
|
||||||
]
|
|
||||||
SessionTimeout: 120
|
|
||||||
|
|
||||||
- task: NuGetCommand@2
|
|
||||||
displayName: Install ESRPClient.exe
|
|
||||||
inputs:
|
|
||||||
restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
|
|
||||||
feedsToUse: config
|
|
||||||
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
|
|
||||||
externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
|
|
||||||
restoreDirectory: packages
|
|
||||||
|
|
||||||
- task: ESRPImportCertTask@1
|
|
||||||
displayName: Import ESRP Request Signing Certificate
|
|
||||||
inputs:
|
|
||||||
ESRP: 'ESRP CodeSign'
|
|
||||||
|
|
||||||
- powershell: |
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
.\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(esrp-auth-certificate) -AuthCertificateKey $(esrp-auth-certificate-key)
|
|
||||||
displayName: Import ESRP Auth Certificate
|
|
||||||
|
|
||||||
- powershell: |
|
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
$env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
|
|
||||||
$env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
|
|
||||||
$env:VSCODE_HOCKEYAPP_TOKEN = "$(vscode-hockeyapp-token)"
|
|
||||||
.\build\azure-pipelines\win32\publish.ps1
|
|
||||||
displayName: Publish
|
|
||||||
|
|
||||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
|
||||||
displayName: 'Component Detection'
|
|
||||||
continueOnError: true
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
. build/azure-pipelines/win32/exec.ps1
|
|
||||||
$ErrorActionPreference = "Stop"
|
|
||||||
|
|
||||||
$Arch = "$env:VSCODE_ARCH"
|
|
||||||
|
|
||||||
exec { yarn gulp "vscode-win32-$Arch-archive" "vscode-win32-$Arch-system-setup" "vscode-win32-$Arch-user-setup" --sign }
|
|
||||||
|
|
||||||
$Repo = "$(pwd)"
|
|
||||||
$Root = "$Repo\.."
|
|
||||||
$SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"
|
|
||||||
$UserExe = "$Repo\.build\win32-$Arch\user-setup\VSCodeSetup.exe"
|
|
||||||
$Zip = "$Repo\.build\win32-$Arch\archive\VSCode-win32-$Arch.zip"
|
|
||||||
$LegacyServer = "$Root\vscode-reh-win32-$Arch"
|
|
||||||
$ServerName = "vscode-server-win32-$Arch"
|
|
||||||
$Server = "$Root\$ServerName"
|
|
||||||
$ServerZip = "$Repo\.build\vscode-server-win32-$Arch.zip"
|
|
||||||
$Build = "$Root\VSCode-win32-$Arch"
|
|
||||||
|
|
||||||
# Create server archive
|
|
||||||
exec { Rename-Item -Path $LegacyServer -NewName $ServerName }
|
|
||||||
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
|
|
||||||
|
|
||||||
# get version
|
|
||||||
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
|
|
||||||
$Version = $PackageJson.version
|
|
||||||
$Quality = "$env:VSCODE_QUALITY"
|
|
||||||
|
|
||||||
$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }
|
|
||||||
|
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
|
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
|
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
|
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
|
|
||||||
|
|
||||||
# publish hockeyapp symbols
|
|
||||||
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
|
|
||||||
exec { node build/azure-pipelines/common/symbols.js "$env:VSCODE_MIXIN_PASSWORD" "$env:VSCODE_HOCKEYAPP_TOKEN" "$Arch" $hockeyAppId }
|
|
||||||
@@ -1,70 +0,0 @@
|
|||||||
function Create-TmpJson($Obj) {
|
|
||||||
$FileName = [System.IO.Path]::GetTempFileName()
|
|
||||||
ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
|
|
||||||
return $FileName
|
|
||||||
}
|
|
||||||
|
|
||||||
$Auth = Create-TmpJson @{
|
|
||||||
Version = "1.0.0"
|
|
||||||
AuthenticationType = "AAD_CERT"
|
|
||||||
ClientId = $env:ESRPClientId
|
|
||||||
AuthCert = @{
|
|
||||||
SubjectName = $env:ESRPAuthCertificateSubjectName
|
|
||||||
StoreLocation = "LocalMachine"
|
|
||||||
StoreName = "My"
|
|
||||||
}
|
|
||||||
RequestSigningCert = @{
|
|
||||||
SubjectName = $env:ESRPCertificateSubjectName
|
|
||||||
StoreLocation = "LocalMachine"
|
|
||||||
StoreName = "My"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
$Policy = Create-TmpJson @{
|
|
||||||
Version = "1.0.0"
|
|
||||||
}
|
|
||||||
|
|
||||||
$Input = Create-TmpJson @{
|
|
||||||
Version = "1.0.0"
|
|
||||||
SignBatches = @(
|
|
||||||
@{
|
|
||||||
SourceLocationType = "UNC"
|
|
||||||
SignRequestFiles = @(
|
|
||||||
@{
|
|
||||||
SourceLocation = $args[0]
|
|
||||||
}
|
|
||||||
)
|
|
||||||
SigningInfo = @{
|
|
||||||
Operations = @(
|
|
||||||
@{
|
|
||||||
KeyCode = "CP-230012"
|
|
||||||
OperationCode = "SigntoolSign"
|
|
||||||
Parameters = @{
|
|
||||||
OpusName = "VS Code"
|
|
||||||
OpusInfo = "https://code.visualstudio.com/"
|
|
||||||
Append = "/as"
|
|
||||||
FileDigest = "/fd `"SHA256`""
|
|
||||||
PageHash = "/NPH"
|
|
||||||
TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
|
|
||||||
}
|
|
||||||
ToolName = "sign"
|
|
||||||
ToolVersion = "1.0"
|
|
||||||
},
|
|
||||||
@{
|
|
||||||
KeyCode = "CP-230012"
|
|
||||||
OperationCode = "SigntoolVerify"
|
|
||||||
Parameters = @{
|
|
||||||
VerifyAll = "/all"
|
|
||||||
}
|
|
||||||
ToolName = "sign"
|
|
||||||
ToolVersion = "1.0"
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
$Output = [System.IO.Path]::GetTempFileName()
|
|
||||||
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
|
|
||||||
& "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output
|
|
||||||
@@ -1,2 +1,12 @@
|
|||||||
[
|
[
|
||||||
|
{
|
||||||
|
"name": "ms-vscode.node-debug",
|
||||||
|
"version": "1.26.7",
|
||||||
|
"repo": "https://github.com/Microsoft/vscode-node-debug"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "ms-vscode.node-debug2",
|
||||||
|
"version": "1.26.8",
|
||||||
|
"repo": "https://github.com/Microsoft/vscode-node-debug2"
|
||||||
|
}
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -43,7 +43,7 @@ function asYarnDependency(prefix, tree) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function getYarnProductionDependencies(cwd) {
|
function getYarnProductionDependencies(cwd) {
|
||||||
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
|
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
|
||||||
const match = /^{"type":"tree".*$/m.exec(raw);
|
const match = /^{"type":"tree".*$/m.exec(raw);
|
||||||
|
|
||||||
if (!match || match.length !== 1) {
|
if (!match || match.length !== 1) {
|
||||||
|
|||||||
@@ -1,91 +0,0 @@
|
|||||||
"use strict";
|
|
||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const https = require("https");
|
|
||||||
const fs = require("fs");
|
|
||||||
const path = require("path");
|
|
||||||
const cp = require("child_process");
|
|
||||||
function ensureDir(filepath) {
|
|
||||||
if (!fs.existsSync(filepath)) {
|
|
||||||
ensureDir(path.dirname(filepath));
|
|
||||||
fs.mkdirSync(filepath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function download(options, destination) {
|
|
||||||
ensureDir(path.dirname(destination));
|
|
||||||
return new Promise((c, e) => {
|
|
||||||
const fd = fs.openSync(destination, 'w');
|
|
||||||
const req = https.get(options, (res) => {
|
|
||||||
res.on('data', (chunk) => {
|
|
||||||
fs.writeSync(fd, chunk);
|
|
||||||
});
|
|
||||||
res.on('end', () => {
|
|
||||||
fs.closeSync(fd);
|
|
||||||
c();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
req.on('error', (reqErr) => {
|
|
||||||
console.error(`request to ${options.host}${options.path} failed.`);
|
|
||||||
console.error(reqErr);
|
|
||||||
e(reqErr);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
const MARKER_ARGUMENT = `_download_fork_`;
|
|
||||||
function base64encode(str) {
|
|
||||||
return Buffer.from(str, 'utf8').toString('base64');
|
|
||||||
}
|
|
||||||
function base64decode(str) {
|
|
||||||
return Buffer.from(str, 'base64').toString('utf8');
|
|
||||||
}
|
|
||||||
function downloadInExternalProcess(options) {
|
|
||||||
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
|
|
||||||
console.log(`Downloading ${url}...`);
|
|
||||||
return new Promise((c, e) => {
|
|
||||||
const child = cp.fork(__filename, [MARKER_ARGUMENT, base64encode(JSON.stringify(options))], {
|
|
||||||
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
|
|
||||||
});
|
|
||||||
let stderr = [];
|
|
||||||
child.stderr.on('data', (chunk) => {
|
|
||||||
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
|
|
||||||
});
|
|
||||||
child.on('exit', (code) => {
|
|
||||||
if (code === 0) {
|
|
||||||
// normal termination
|
|
||||||
console.log(`Finished downloading ${url}.`);
|
|
||||||
c();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
// abnormal termination
|
|
||||||
console.error(Buffer.concat(stderr).toString());
|
|
||||||
e(new Error(`Download of ${url} failed.`));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.downloadInExternalProcess = downloadInExternalProcess;
|
|
||||||
function _downloadInExternalProcess() {
|
|
||||||
let options;
|
|
||||||
try {
|
|
||||||
options = JSON.parse(base64decode(process.argv[3]));
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
console.error(`Cannot read arguments`);
|
|
||||||
console.error(err);
|
|
||||||
process.exit(-1);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
download(options.requestOptions, options.destinationPath).then(() => {
|
|
||||||
process.exit(0);
|
|
||||||
}, (err) => {
|
|
||||||
console.error(err);
|
|
||||||
process.exit(-2);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
|
|
||||||
// running as forked download script
|
|
||||||
_downloadInExternalProcess();
|
|
||||||
}
|
|
||||||
@@ -1,111 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
import * as https from 'https';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as path from 'path';
|
|
||||||
import * as cp from 'child_process';
|
|
||||||
|
|
||||||
function ensureDir(filepath: string) {
|
|
||||||
if (!fs.existsSync(filepath)) {
|
|
||||||
ensureDir(path.dirname(filepath));
|
|
||||||
fs.mkdirSync(filepath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function download(options: https.RequestOptions, destination: string): Promise<void> {
|
|
||||||
ensureDir(path.dirname(destination));
|
|
||||||
|
|
||||||
return new Promise<void>((c, e) => {
|
|
||||||
const fd = fs.openSync(destination, 'w');
|
|
||||||
const req = https.get(options, (res) => {
|
|
||||||
res.on('data', (chunk) => {
|
|
||||||
fs.writeSync(fd, chunk);
|
|
||||||
});
|
|
||||||
res.on('end', () => {
|
|
||||||
fs.closeSync(fd);
|
|
||||||
c();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
req.on('error', (reqErr) => {
|
|
||||||
console.error(`request to ${options.host}${options.path} failed.`);
|
|
||||||
console.error(reqErr);
|
|
||||||
e(reqErr);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const MARKER_ARGUMENT = `_download_fork_`;
|
|
||||||
|
|
||||||
function base64encode(str: string): string {
|
|
||||||
return Buffer.from(str, 'utf8').toString('base64');
|
|
||||||
}
|
|
||||||
|
|
||||||
function base64decode(str: string): string {
|
|
||||||
return Buffer.from(str, 'base64').toString('utf8');
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface IDownloadRequestOptions {
|
|
||||||
host: string;
|
|
||||||
path: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export interface IDownloadOptions {
|
|
||||||
requestOptions: IDownloadRequestOptions;
|
|
||||||
destinationPath: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function downloadInExternalProcess(options: IDownloadOptions): Promise<void> {
|
|
||||||
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
|
|
||||||
console.log(`Downloading ${url}...`);
|
|
||||||
return new Promise<void>((c, e) => {
|
|
||||||
const child = cp.fork(
|
|
||||||
__filename,
|
|
||||||
[MARKER_ARGUMENT, base64encode(JSON.stringify(options))],
|
|
||||||
{
|
|
||||||
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
|
|
||||||
}
|
|
||||||
);
|
|
||||||
let stderr: Buffer[] = [];
|
|
||||||
child.stderr.on('data', (chunk) => {
|
|
||||||
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
|
|
||||||
});
|
|
||||||
child.on('exit', (code) => {
|
|
||||||
if (code === 0) {
|
|
||||||
// normal termination
|
|
||||||
console.log(`Finished downloading ${url}.`);
|
|
||||||
c();
|
|
||||||
} else {
|
|
||||||
// abnormal termination
|
|
||||||
console.error(Buffer.concat(stderr).toString());
|
|
||||||
e(new Error(`Download of ${url} failed.`));
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
function _downloadInExternalProcess() {
|
|
||||||
let options: IDownloadOptions;
|
|
||||||
try {
|
|
||||||
options = JSON.parse(base64decode(process.argv[3]));
|
|
||||||
} catch (err) {
|
|
||||||
console.error(`Cannot read arguments`);
|
|
||||||
console.error(err);
|
|
||||||
process.exit(-1);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
download(options.requestOptions, options.destinationPath).then(() => {
|
|
||||||
process.exit(0);
|
|
||||||
}, (err) => {
|
|
||||||
console.error(err);
|
|
||||||
process.exit(-2);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
|
|
||||||
// running as forked download script
|
|
||||||
_downloadInExternalProcess();
|
|
||||||
}
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
const util = require('./lib/util');
|
|
||||||
const task = require('./lib/task');
|
|
||||||
const compilation = require('./lib/compilation');
|
|
||||||
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
|
|
||||||
|
|
||||||
// Full compile, including nls and inline sources in sourcemaps, for build
|
|
||||||
const compileClientBuildTask = task.define('compile-client-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
|
|
||||||
|
|
||||||
// All Build
|
|
||||||
const compileBuildTask = task.define('compile-build', task.parallel(compileClientBuildTask, compileExtensionsBuildTask));
|
|
||||||
exports.compileBuildTask = compileBuildTask;
|
|
||||||
@@ -6,7 +6,6 @@
|
|||||||
const gulp = require('gulp');
|
const gulp = require('gulp');
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
const util = require('./lib/util');
|
const util = require('./lib/util');
|
||||||
const task = require('./lib/task');
|
|
||||||
const common = require('./lib/optimize');
|
const common = require('./lib/optimize');
|
||||||
const es = require('event-stream');
|
const es = require('event-stream');
|
||||||
const File = require('vinyl');
|
const File = require('vinyl');
|
||||||
@@ -29,7 +28,7 @@ var editorEntryPoints = [
|
|||||||
name: 'vs/editor/editor.main',
|
name: 'vs/editor/editor.main',
|
||||||
include: [],
|
include: [],
|
||||||
exclude: ['vs/css', 'vs/nls'],
|
exclude: ['vs/css', 'vs/nls'],
|
||||||
prepend: ['out-editor-build/vs/css.js', 'out-editor-build/vs/nls.js'],
|
prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
name: 'vs/base/common/worker/simpleWorker',
|
name: 'vs/base/common/worker/simpleWorker',
|
||||||
@@ -49,6 +48,9 @@ var editorResources = [
|
|||||||
'!**/test/**'
|
'!**/test/**'
|
||||||
];
|
];
|
||||||
|
|
||||||
|
var editorOtherSources = [
|
||||||
|
];
|
||||||
|
|
||||||
var BUNDLED_FILE_HEADER = [
|
var BUNDLED_FILE_HEADER = [
|
||||||
'/*!-----------------------------------------------------------',
|
'/*!-----------------------------------------------------------',
|
||||||
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||||
@@ -61,7 +63,8 @@ var BUNDLED_FILE_HEADER = [
|
|||||||
|
|
||||||
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||||
|
|
||||||
const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
gulp.task('clean-editor-src', util.rimraf('out-editor-src'));
|
||||||
|
gulp.task('extract-editor-src', ['clean-editor-src'], function () {
|
||||||
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
|
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
|
||||||
const apiusages = monacoapi.execute().usageContent;
|
const apiusages = monacoapi.execute().usageContent;
|
||||||
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
|
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
|
||||||
@@ -76,39 +79,35 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
|||||||
apiusages,
|
apiusages,
|
||||||
extrausages
|
extrausages
|
||||||
],
|
],
|
||||||
typings: [
|
|
||||||
'typings/lib.ie11_safe_es6.d.ts',
|
|
||||||
'typings/thenable.d.ts',
|
|
||||||
'typings/es6-promise.d.ts',
|
|
||||||
'typings/require-monaco.d.ts',
|
|
||||||
"typings/lib.es2018.promise.d.ts",
|
|
||||||
'vs/monaco.d.ts'
|
|
||||||
],
|
|
||||||
libs: [
|
libs: [
|
||||||
`lib.es5.d.ts`,
|
`lib.d.ts`,
|
||||||
`lib.dom.d.ts`,
|
`lib.es2015.collection.d.ts`
|
||||||
`lib.webworker.importscripts.d.ts`
|
|
||||||
],
|
],
|
||||||
redirects: {
|
redirects: {
|
||||||
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
||||||
},
|
},
|
||||||
|
compilerOptions: {
|
||||||
|
module: 2, // ModuleKind.AMD
|
||||||
|
},
|
||||||
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
|
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
|
||||||
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
|
importIgnorePattern: /^vs\/css!/,
|
||||||
destRoot: path.join(root, 'out-editor-src')
|
destRoot: path.join(root, 'out-editor-src')
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
const compileEditorAMDTask = task.define('compile-editor-amd', compilation.compileTask('out-editor-src', 'out-editor-build', true));
|
// Full compile, including nls and inline sources in sourcemaps, for build
|
||||||
|
gulp.task('clean-editor-build', util.rimraf('out-editor-build'));
|
||||||
|
gulp.task('compile-editor-build', ['clean-editor-build', 'extract-editor-src'], compilation.compileTask('out-editor-src', 'out-editor-build', true));
|
||||||
|
|
||||||
const optimizeEditorAMDTask = task.define('optimize-editor-amd', common.optimizeTask({
|
gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
|
||||||
|
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'], common.optimizeTask({
|
||||||
src: 'out-editor-build',
|
src: 'out-editor-build',
|
||||||
entryPoints: editorEntryPoints,
|
entryPoints: editorEntryPoints,
|
||||||
|
otherSources: editorOtherSources,
|
||||||
resources: editorResources,
|
resources: editorResources,
|
||||||
loaderConfig: {
|
loaderConfig: {
|
||||||
paths: {
|
paths: {
|
||||||
'vs': 'out-editor-build/vs',
|
'vs': 'out-editor-build/vs',
|
||||||
'vs/css': 'out-editor-build/vs/css.build',
|
|
||||||
'vs/nls': 'out-editor-build/vs/nls.build',
|
|
||||||
'vscode': 'empty:'
|
'vscode': 'empty:'
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -119,45 +118,29 @@ const optimizeEditorAMDTask = task.define('optimize-editor-amd', common.optimize
|
|||||||
languages: languages
|
languages: languages
|
||||||
}));
|
}));
|
||||||
|
|
||||||
const minifyEditorAMDTask = task.define('minify-editor-amd', common.minifyTask('out-editor'));
|
gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
|
||||||
|
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
|
||||||
|
|
||||||
const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () => {
|
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
|
||||||
standalone.createESMSourcesAndResources2({
|
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
|
||||||
srcFolder: './out-editor-src',
|
standalone.createESMSourcesAndResources({
|
||||||
outFolder: './out-editor-esm',
|
entryPoints: [
|
||||||
outResourcesFolder: './out-monaco-editor-core/esm',
|
'vs/editor/editor.main',
|
||||||
ignores: [
|
'vs/editor/editor.worker'
|
||||||
'inlineEntryPoint:0.ts',
|
|
||||||
'inlineEntryPoint:1.ts',
|
|
||||||
'vs/loader.js',
|
|
||||||
'vs/nls.ts',
|
|
||||||
'vs/nls.build.js',
|
|
||||||
'vs/nls.d.ts',
|
|
||||||
'vs/css.js',
|
|
||||||
'vs/css.build.js',
|
|
||||||
'vs/css.d.ts',
|
|
||||||
'vs/base/worker/workerMain.ts',
|
|
||||||
],
|
],
|
||||||
renames: {
|
outFolder: './out-editor-esm/src',
|
||||||
'vs/nls.mock.ts': 'vs/nls.ts'
|
outResourcesFolder: './out-monaco-editor-core/esm',
|
||||||
|
redirects: {
|
||||||
|
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
||||||
|
'vs/nls': 'vs/nls.mock',
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
|
||||||
const compileEditorESMTask = task.define('compile-editor-esm', () => {
|
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
|
||||||
if (process.platform === 'win32') {
|
cwd: path.join(__dirname, '../out-editor-esm')
|
||||||
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
|
});
|
||||||
cwd: path.join(__dirname, '../out-editor-esm')
|
console.log(result.stdout.toString());
|
||||||
});
|
|
||||||
console.log(result.stdout.toString());
|
|
||||||
console.log(result.stderr.toString());
|
|
||||||
} else {
|
|
||||||
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
|
|
||||||
cwd: path.join(__dirname, '../out-editor-esm')
|
|
||||||
});
|
|
||||||
console.log(result.stdout.toString());
|
|
||||||
console.log(result.stderr.toString());
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
function toExternalDTS(contents) {
|
function toExternalDTS(contents) {
|
||||||
@@ -195,16 +178,8 @@ function toExternalDTS(contents) {
|
|||||||
return lines.join('\n');
|
return lines.join('\n');
|
||||||
}
|
}
|
||||||
|
|
||||||
function filterStream(testFunc) {
|
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
|
||||||
return es.through(function (data) {
|
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () {
|
||||||
if (!testFunc(data.relative)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
this.emit('data', data);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
|
||||||
return es.merge(
|
return es.merge(
|
||||||
// other assets
|
// other assets
|
||||||
es.merge(
|
es.merge(
|
||||||
@@ -219,7 +194,7 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
|||||||
this.emit('data', new File({
|
this.emit('data', new File({
|
||||||
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
|
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
|
||||||
base: data.base,
|
base: data.base,
|
||||||
contents: Buffer.from(toExternalDTS(data.contents.toString()))
|
contents: new Buffer(toExternalDTS(data.contents.toString()))
|
||||||
}));
|
}));
|
||||||
}))
|
}))
|
||||||
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
|
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
|
||||||
@@ -234,14 +209,6 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
|||||||
}))
|
}))
|
||||||
.pipe(gulp.dest('out-monaco-editor-core')),
|
.pipe(gulp.dest('out-monaco-editor-core')),
|
||||||
|
|
||||||
// version.txt
|
|
||||||
gulp.src('build/monaco/version.txt')
|
|
||||||
.pipe(es.through(function (data) {
|
|
||||||
data.contents = Buffer.from(`monaco-editor-core: https://github.com/Microsoft/vscode/tree/${sha1}`);
|
|
||||||
this.emit('data', data);
|
|
||||||
}))
|
|
||||||
.pipe(gulp.dest('out-monaco-editor-core')),
|
|
||||||
|
|
||||||
// README.md
|
// README.md
|
||||||
gulp.src('build/monaco/README-npm.md')
|
gulp.src('build/monaco/README-npm.md')
|
||||||
.pipe(es.through(function (data) {
|
.pipe(es.through(function (data) {
|
||||||
@@ -275,7 +242,7 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
|||||||
|
|
||||||
var strContents = data.contents.toString();
|
var strContents = data.contents.toString();
|
||||||
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
|
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
|
||||||
strContents = strContents.replace(/\/\/# sourceMappingURL=[^ ]+$/, newStr);
|
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
|
||||||
|
|
||||||
data.contents = Buffer.from(strContents);
|
data.contents = Buffer.from(strContents);
|
||||||
this.emit('data', data);
|
this.emit('data', data);
|
||||||
@@ -291,31 +258,59 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
|||||||
);
|
);
|
||||||
});
|
});
|
||||||
|
|
||||||
gulp.task('editor-distro',
|
gulp.task('analyze-editor-distro', function () {
|
||||||
task.series(
|
// @ts-ignore
|
||||||
task.parallel(
|
var bundleInfo = require('../out-editor/bundleInfo.json');
|
||||||
util.rimraf('out-editor-src'),
|
var graph = bundleInfo.graph;
|
||||||
util.rimraf('out-editor-build'),
|
var bundles = bundleInfo.bundles;
|
||||||
util.rimraf('out-editor-esm'),
|
|
||||||
util.rimraf('out-monaco-editor-core'),
|
var inverseGraph = {};
|
||||||
util.rimraf('out-editor'),
|
Object.keys(graph).forEach(function (module) {
|
||||||
util.rimraf('out-editor-min')
|
var dependencies = graph[module];
|
||||||
),
|
dependencies.forEach(function (dep) {
|
||||||
extractEditorSrcTask,
|
inverseGraph[dep] = inverseGraph[dep] || [];
|
||||||
task.parallel(
|
inverseGraph[dep].push(module);
|
||||||
task.series(
|
});
|
||||||
compileEditorAMDTask,
|
});
|
||||||
optimizeEditorAMDTask,
|
|
||||||
minifyEditorAMDTask
|
var detailed = {};
|
||||||
),
|
Object.keys(bundles).forEach(function (entryPoint) {
|
||||||
task.series(
|
var included = bundles[entryPoint];
|
||||||
createESMSourcesAndResourcesTask,
|
var includedMap = {};
|
||||||
compileEditorESMTask
|
included.forEach(function (included) {
|
||||||
)
|
includedMap[included] = true;
|
||||||
),
|
});
|
||||||
finalEditorResourcesTask
|
|
||||||
)
|
var explanation = [];
|
||||||
);
|
included.map(function (included) {
|
||||||
|
if (included.indexOf('!') >= 0) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var reason = (inverseGraph[included] || []).filter(function (mod) {
|
||||||
|
return !!includedMap[mod];
|
||||||
|
});
|
||||||
|
explanation.push({
|
||||||
|
module: included,
|
||||||
|
reason: reason
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
detailed[entryPoint] = explanation;
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(JSON.stringify(detailed, null, '\t'));
|
||||||
|
});
|
||||||
|
|
||||||
|
function filterStream(testFunc) {
|
||||||
|
return es.through(function (data) {
|
||||||
|
if (!testFunc(data.relative)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
this.emit('data', data);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
//#region monaco type checking
|
//#region monaco type checking
|
||||||
|
|
||||||
@@ -335,7 +330,6 @@ function createTscCompileTask(watch) {
|
|||||||
let errors = [];
|
let errors = [];
|
||||||
let reporter = createReporter();
|
let reporter = createReporter();
|
||||||
let report;
|
let report;
|
||||||
// eslint-disable-next-line no-control-regex
|
|
||||||
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
|
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
|
||||||
|
|
||||||
child.stdout.on('data', data => {
|
child.stdout.on('data', data => {
|
||||||
@@ -369,10 +363,7 @@ function createTscCompileTask(watch) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const monacoTypecheckWatchTask = task.define('monaco-typecheck-watch', createTscCompileTask(true));
|
gulp.task('monaco-typecheck-watch', createTscCompileTask(true));
|
||||||
exports.monacoTypecheckWatchTask = monacoTypecheckWatchTask;
|
gulp.task('monaco-typecheck', createTscCompileTask(false));
|
||||||
|
|
||||||
const monacoTypecheckTask = task.define('monaco-typecheck', createTscCompileTask(false));
|
|
||||||
exports.monacoTypecheckTask = monacoTypecheckTask;
|
|
||||||
|
|
||||||
//#endregion
|
//#endregion
|
||||||
|
|||||||
@@ -11,8 +11,8 @@ const path = require('path');
|
|||||||
const tsb = require('gulp-tsb');
|
const tsb = require('gulp-tsb');
|
||||||
const es = require('event-stream');
|
const es = require('event-stream');
|
||||||
const filter = require('gulp-filter');
|
const filter = require('gulp-filter');
|
||||||
|
const rimraf = require('rimraf');
|
||||||
const util = require('./lib/util');
|
const util = require('./lib/util');
|
||||||
const task = require('./lib/task');
|
|
||||||
const watcher = require('./lib/watch');
|
const watcher = require('./lib/watch');
|
||||||
const createReporter = require('./lib/reporter').createReporter;
|
const createReporter = require('./lib/reporter').createReporter;
|
||||||
const glob = require('glob');
|
const glob = require('glob');
|
||||||
@@ -21,7 +21,6 @@ const nlsDev = require('vscode-nls-dev');
|
|||||||
const root = path.dirname(__dirname);
|
const root = path.dirname(__dirname);
|
||||||
const commit = util.getVersion(root);
|
const commit = util.getVersion(root);
|
||||||
const plumber = require('gulp-plumber');
|
const plumber = require('gulp-plumber');
|
||||||
const _ = require('underscore');
|
|
||||||
|
|
||||||
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
||||||
|
|
||||||
@@ -36,13 +35,22 @@ const tasks = compilations.map(function (tsconfigFile) {
|
|||||||
const absolutePath = path.join(extensionsPath, tsconfigFile);
|
const absolutePath = path.join(extensionsPath, tsconfigFile);
|
||||||
const relativeDirname = path.dirname(tsconfigFile);
|
const relativeDirname = path.dirname(tsconfigFile);
|
||||||
|
|
||||||
const tsconfig = require(absolutePath);
|
const tsOptions = require(absolutePath).compilerOptions;
|
||||||
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
|
|
||||||
tsOptions.verbose = false;
|
tsOptions.verbose = false;
|
||||||
tsOptions.sourceMap = true;
|
tsOptions.sourceMap = true;
|
||||||
|
|
||||||
const name = relativeDirname.replace(/\//g, '-');
|
const name = relativeDirname.replace(/\//g, '-');
|
||||||
|
|
||||||
|
// Tasks
|
||||||
|
const clean = 'clean-extension:' + name;
|
||||||
|
const compile = 'compile-extension:' + name;
|
||||||
|
const watch = 'watch-extension:' + name;
|
||||||
|
|
||||||
|
// Build Tasks
|
||||||
|
const cleanBuild = 'clean-extension-build:' + name;
|
||||||
|
const compileBuild = 'compile-extension-build:' + name;
|
||||||
|
const watchBuild = 'watch-extension-build:' + name;
|
||||||
|
|
||||||
const root = path.join('extensions', relativeDirname);
|
const root = path.join('extensions', relativeDirname);
|
||||||
const srcBase = path.join(root, 'src');
|
const srcBase = path.join(root, 'src');
|
||||||
const src = path.join(srcBase, '**');
|
const src = path.join(srcBase, '**');
|
||||||
@@ -101,18 +109,18 @@ const tasks = compilations.map(function (tsconfigFile) {
|
|||||||
|
|
||||||
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
|
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
|
||||||
|
|
||||||
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
|
gulp.task(clean, cb => rimraf(out, cb));
|
||||||
|
|
||||||
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
|
gulp.task(compile, [clean], () => {
|
||||||
const pipeline = createPipeline(false, true);
|
const pipeline = createPipeline(false, true);
|
||||||
const input = gulp.src(src, srcOpts);
|
const input = gulp.src(src, srcOpts);
|
||||||
|
|
||||||
return input
|
return input
|
||||||
.pipe(pipeline())
|
.pipe(pipeline())
|
||||||
.pipe(gulp.dest(out));
|
.pipe(gulp.dest(out));
|
||||||
}));
|
});
|
||||||
|
|
||||||
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
|
gulp.task(watch, [clean], () => {
|
||||||
const pipeline = createPipeline(false);
|
const pipeline = createPipeline(false);
|
||||||
const input = gulp.src(src, srcOpts);
|
const input = gulp.src(src, srcOpts);
|
||||||
const watchInput = watcher(src, srcOpts);
|
const watchInput = watcher(src, srcOpts);
|
||||||
@@ -120,35 +128,43 @@ const tasks = compilations.map(function (tsconfigFile) {
|
|||||||
return watchInput
|
return watchInput
|
||||||
.pipe(util.incremental(pipeline, input))
|
.pipe(util.incremental(pipeline, input))
|
||||||
.pipe(gulp.dest(out));
|
.pipe(gulp.dest(out));
|
||||||
}));
|
});
|
||||||
|
|
||||||
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
|
gulp.task(cleanBuild, cb => rimraf(out, cb));
|
||||||
|
|
||||||
|
gulp.task(compileBuild, [clean], () => {
|
||||||
const pipeline = createPipeline(true, true);
|
const pipeline = createPipeline(true, true);
|
||||||
const input = gulp.src(src, srcOpts);
|
const input = gulp.src(src, srcOpts);
|
||||||
|
|
||||||
return input
|
return input
|
||||||
.pipe(pipeline())
|
.pipe(pipeline())
|
||||||
.pipe(gulp.dest(out));
|
.pipe(gulp.dest(out));
|
||||||
}));
|
});
|
||||||
|
|
||||||
// Tasks
|
gulp.task(watchBuild, [clean], () => {
|
||||||
gulp.task(compileTask);
|
const pipeline = createPipeline(true);
|
||||||
gulp.task(watchTask);
|
const input = gulp.src(src, srcOpts);
|
||||||
|
const watchInput = watcher(src, srcOpts);
|
||||||
|
|
||||||
|
return watchInput
|
||||||
|
.pipe(util.incremental(() => pipeline(), input))
|
||||||
|
.pipe(gulp.dest(out));
|
||||||
|
});
|
||||||
|
|
||||||
return {
|
return {
|
||||||
compileTask: compileTask,
|
clean: clean,
|
||||||
watchTask: watchTask,
|
compile: compile,
|
||||||
compileBuildTask: compileBuildTask
|
watch: watch,
|
||||||
|
cleanBuild: cleanBuild,
|
||||||
|
compileBuild: compileBuild,
|
||||||
|
watchBuild: watchBuild
|
||||||
};
|
};
|
||||||
});
|
});
|
||||||
|
|
||||||
const compileExtensionsTask = task.define('compile-extensions', task.parallel(...tasks.map(t => t.compileTask)));
|
gulp.task('clean-extensions', tasks.map(t => t.clean));
|
||||||
gulp.task(compileExtensionsTask);
|
gulp.task('compile-extensions', tasks.map(t => t.compile));
|
||||||
exports.compileExtensionsTask = compileExtensionsTask;
|
gulp.task('watch-extensions', tasks.map(t => t.watch));
|
||||||
|
|
||||||
const watchExtensionsTask = task.define('watch-extensions', task.parallel(...tasks.map(t => t.watchTask)));
|
gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild));
|
||||||
gulp.task(watchExtensionsTask);
|
gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild));
|
||||||
exports.watchExtensionsTask = watchExtensionsTask;
|
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));
|
||||||
|
|
||||||
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.parallel(...tasks.map(t => t.compileBuildTask)));
|
|
||||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
|
||||||
@@ -42,15 +42,12 @@ const indentationFilter = [
|
|||||||
|
|
||||||
// except specific files
|
// except specific files
|
||||||
'!ThirdPartyNotices.txt',
|
'!ThirdPartyNotices.txt',
|
||||||
'!LICENSE.{txt,rtf}',
|
'!LICENSE.txt',
|
||||||
'!LICENSES.chromium.html',
|
|
||||||
'!**/LICENSE',
|
|
||||||
'!src/vs/nls.js',
|
'!src/vs/nls.js',
|
||||||
'!src/vs/nls.build.js',
|
|
||||||
'!src/vs/css.js',
|
'!src/vs/css.js',
|
||||||
'!src/vs/css.build.js',
|
|
||||||
'!src/vs/loader.js',
|
'!src/vs/loader.js',
|
||||||
'!src/vs/base/common/marked/marked.js',
|
'!src/vs/base/common/marked/marked.js',
|
||||||
|
'!src/vs/base/common/winjs.base.js',
|
||||||
'!src/vs/base/node/terminateProcess.sh',
|
'!src/vs/base/node/terminateProcess.sh',
|
||||||
'!src/vs/base/node/cpuUsage.sh',
|
'!src/vs/base/node/cpuUsage.sh',
|
||||||
'!test/assert.js',
|
'!test/assert.js',
|
||||||
@@ -81,22 +78,13 @@ const indentationFilter = [
|
|||||||
'!src/vs/*/**/*.d.ts',
|
'!src/vs/*/**/*.d.ts',
|
||||||
'!src/typings/**/*.d.ts',
|
'!src/typings/**/*.d.ts',
|
||||||
'!extensions/**/*.d.ts',
|
'!extensions/**/*.d.ts',
|
||||||
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}',
|
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
|
||||||
'!build/{lib,tslintRules,download}/**/*.js',
|
'!build/{lib,tslintRules}/**/*.js',
|
||||||
'!build/**/*.sh',
|
'!build/**/*.sh',
|
||||||
'!build/azure-pipelines/**/*.js',
|
'!build/tfs/**/*.js',
|
||||||
'!build/azure-pipelines/**/*.config',
|
'!build/tfs/**/*.config',
|
||||||
'!**/Dockerfile',
|
'!**/Dockerfile',
|
||||||
'!**/Dockerfile.*',
|
'!extensions/markdown-language-features/media/*.js'
|
||||||
'!**/*.Dockerfile',
|
|
||||||
'!**/*.dockerfile',
|
|
||||||
'!extensions/markdown-language-features/media/*.js',
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
'!**/*.{xlf,docx,sql,vsix}',
|
|
||||||
'!extensions/mssql/sqltoolsservice/**',
|
|
||||||
'!extensions/import/flatfileimportservice/**',
|
|
||||||
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
|
||||||
'!extensions/resource-deployment/notebooks/**'
|
|
||||||
];
|
];
|
||||||
|
|
||||||
const copyrightFilter = [
|
const copyrightFilter = [
|
||||||
@@ -108,8 +96,6 @@ const copyrightFilter = [
|
|||||||
'!**/*.md',
|
'!**/*.md',
|
||||||
'!**/*.bat',
|
'!**/*.bat',
|
||||||
'!**/*.cmd',
|
'!**/*.cmd',
|
||||||
'!**/*.ico',
|
|
||||||
'!**/*.icns',
|
|
||||||
'!**/*.xml',
|
'!**/*.xml',
|
||||||
'!**/*.sh',
|
'!**/*.sh',
|
||||||
'!**/*.txt',
|
'!**/*.txt',
|
||||||
@@ -117,46 +103,13 @@ const copyrightFilter = [
|
|||||||
'!**/*.opts',
|
'!**/*.opts',
|
||||||
'!**/*.disabled',
|
'!**/*.disabled',
|
||||||
'!**/*.code-workspace',
|
'!**/*.code-workspace',
|
||||||
'!**/promise-polyfill/polyfill.js',
|
|
||||||
'!build/**/*.init',
|
'!build/**/*.init',
|
||||||
'!resources/linux/snap/snapcraft.yaml',
|
'!resources/linux/snap/snapcraft.yaml',
|
||||||
'!resources/linux/snap/electron-launch',
|
'!resources/linux/snap/electron-launch',
|
||||||
'!resources/win32/bin/code.js',
|
'!resources/win32/bin/code.js',
|
||||||
'!resources/completions/**',
|
|
||||||
'!extensions/markdown-language-features/media/highlight.css',
|
'!extensions/markdown-language-features/media/highlight.css',
|
||||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||||
'!extensions/*/server/bin/*',
|
'!extensions/*/server/bin/*'
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
'!extensions/notebook/src/intellisense/text.ts',
|
|
||||||
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/tableRenderers.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/common/url.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/common/renderMimeInterfaces.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/common/outputProcessor.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/common/mimemodel.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/cellViews/media/*.css',
|
|
||||||
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
|
|
||||||
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
|
|
||||||
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
|
|
||||||
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
|
|
||||||
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
|
|
||||||
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/sanitizer.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/renderers.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/registry.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/outputs/factories.ts',
|
|
||||||
'!src/sql/workbench/parts/notebook/models/nbformat.ts',
|
|
||||||
'!extensions/markdown-language-features/media/tomorrow.css',
|
|
||||||
'!src/sql/workbench/electron-browser/modelComponents/media/highlight.css',
|
|
||||||
'!src/sql/parts/modelComponents/highlight.css',
|
|
||||||
'!extensions/mssql/sqltoolsservice/**',
|
|
||||||
'!extensions/import/flatfileimportservice/**',
|
|
||||||
'!extensions/notebook/src/prompts/**',
|
|
||||||
'!extensions/mssql/src/prompts/**',
|
|
||||||
'!extensions/notebook/resources/jupyter_config/**',
|
|
||||||
'!**/*.gif',
|
|
||||||
'!**/*.xlf',
|
|
||||||
'!**/*.dacpac'
|
|
||||||
];
|
];
|
||||||
|
|
||||||
const eslintFilter = [
|
const eslintFilter = [
|
||||||
@@ -167,6 +120,7 @@ const eslintFilter = [
|
|||||||
'!src/vs/nls.js',
|
'!src/vs/nls.js',
|
||||||
'!src/vs/css.build.js',
|
'!src/vs/css.build.js',
|
||||||
'!src/vs/nls.build.js',
|
'!src/vs/nls.build.js',
|
||||||
|
'!src/**/winjs.base.js',
|
||||||
'!src/**/marked.js',
|
'!src/**/marked.js',
|
||||||
'!**/test/**'
|
'!**/test/**'
|
||||||
];
|
];
|
||||||
@@ -185,11 +139,6 @@ const tslintFilter = [
|
|||||||
'!extensions/html-language-features/server/lib/jquery.d.ts'
|
'!extensions/html-language-features/server/lib/jquery.d.ts'
|
||||||
];
|
];
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const useStrictFilter = [
|
|
||||||
'src/**'
|
|
||||||
];
|
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const copyrightHeaderLines = [
|
const copyrightHeaderLines = [
|
||||||
'/*---------------------------------------------------------------------------------------------',
|
'/*---------------------------------------------------------------------------------------------',
|
||||||
@@ -207,7 +156,8 @@ gulp.task('eslint', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
gulp.task('tslint', () => {
|
gulp.task('tslint', () => {
|
||||||
const options = { emitError: true };
|
// {{SQL CARBON EDIT}}
|
||||||
|
const options = { emitError: false };
|
||||||
|
|
||||||
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
|
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
|
||||||
.pipe(filter(tslintFilter))
|
.pipe(filter(tslintFilter))
|
||||||
@@ -240,8 +190,8 @@ function hygiene(some) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
const copyrights = es.through(function (file) {
|
const copyrights = es.through(function (file) {
|
||||||
|
|
||||||
const lines = file.__lines;
|
const lines = file.__lines;
|
||||||
|
|
||||||
for (let i = 0; i < copyrightHeaderLines.length; i++) {
|
for (let i = 0; i < copyrightHeaderLines.length; i++) {
|
||||||
if (lines[i] !== copyrightHeaderLines[i]) {
|
if (lines[i] !== copyrightHeaderLines[i]) {
|
||||||
console.error(file.relative + ': Missing or bad copyright statement');
|
console.error(file.relative + ': Missing or bad copyright statement');
|
||||||
@@ -253,23 +203,6 @@ function hygiene(some) {
|
|||||||
this.emit('data', file);
|
this.emit('data', file);
|
||||||
});
|
});
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
|
|
||||||
const useStrict = es.through(function (file) {
|
|
||||||
const lines = file.__lines;
|
|
||||||
// Only take the first 10 lines to reduce false positives- the compiler will throw an error if it's not the first non-comment line in a file
|
|
||||||
// (10 is used to account for copyright and extraneous newlines)
|
|
||||||
lines.slice(0, 10).forEach((line, i) => {
|
|
||||||
if (/\s*'use\s*strict\s*'/.test(line)) {
|
|
||||||
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
|
|
||||||
errorCount++;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
this.emit('data', file);
|
|
||||||
});
|
|
||||||
// {{SQL CARBON EDIT}} END
|
|
||||||
|
|
||||||
const formatting = es.map(function (file, cb) {
|
const formatting = es.map(function (file, cb) {
|
||||||
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
||||||
verify: false,
|
verify: false,
|
||||||
@@ -290,7 +223,7 @@ function hygiene(some) {
|
|||||||
let formatted = result.dest.replace(/\r\n/gm, '\n');
|
let formatted = result.dest.replace(/\r\n/gm, '\n');
|
||||||
|
|
||||||
if (original !== formatted) {
|
if (original !== formatted) {
|
||||||
console.error("File not formatted. Run the 'Format Document' command to fix it:", file.relative);
|
console.error('File not formatted:', file.relative);
|
||||||
errorCount++;
|
errorCount++;
|
||||||
}
|
}
|
||||||
cb(null, file);
|
cb(null, file);
|
||||||
@@ -322,52 +255,27 @@ function hygiene(some) {
|
|||||||
.pipe(filter(f => !f.stat.isDirectory()))
|
.pipe(filter(f => !f.stat.isDirectory()))
|
||||||
.pipe(filter(indentationFilter))
|
.pipe(filter(indentationFilter))
|
||||||
.pipe(indentation)
|
.pipe(indentation)
|
||||||
.pipe(filter(copyrightFilter))
|
.pipe(filter(copyrightFilter));
|
||||||
.pipe(copyrights);
|
// {{SQL CARBON EDIT}}
|
||||||
|
// .pipe(copyrights);
|
||||||
|
|
||||||
const typescript = result
|
const typescript = result
|
||||||
.pipe(filter(tslintFilter))
|
.pipe(filter(tslintFilter))
|
||||||
.pipe(formatting)
|
.pipe(formatting)
|
||||||
.pipe(tsl)
|
.pipe(tsl);
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
.pipe(filter(useStrictFilter))
|
|
||||||
.pipe(useStrict);
|
|
||||||
|
|
||||||
const javascript = result
|
const javascript = result
|
||||||
.pipe(filter(eslintFilter))
|
.pipe(filter(eslintFilter))
|
||||||
.pipe(gulpeslint('src/.eslintrc'))
|
.pipe(gulpeslint('src/.eslintrc'))
|
||||||
.pipe(gulpeslint.formatEach('compact'))
|
.pipe(gulpeslint.formatEach('compact'));
|
||||||
.pipe(gulpeslint.failAfterError());
|
// {{SQL CARBON EDIT}}
|
||||||
|
// .pipe(gulpeslint.failAfterError());
|
||||||
|
|
||||||
let count = 0;
|
let count = 0;
|
||||||
return es.merge(typescript, javascript)
|
return es.merge(typescript, javascript)
|
||||||
.pipe(es.through(function (data) {
|
.pipe(es.through(function (data) {
|
||||||
count++;
|
// {{SQL CARBON EDIT}}
|
||||||
if (process.env['TRAVIS'] && count % 10 === 0) {
|
this.emit('end');
|
||||||
process.stdout.write('.');
|
|
||||||
}
|
|
||||||
this.emit('data', data);
|
|
||||||
}, function () {
|
|
||||||
process.stdout.write('\n');
|
|
||||||
|
|
||||||
const tslintResult = tsLinter.getResult();
|
|
||||||
if (tslintResult.failures.length > 0) {
|
|
||||||
for (const failure of tslintResult.failures) {
|
|
||||||
const name = failure.getFileName();
|
|
||||||
const position = failure.getStartPosition();
|
|
||||||
const line = position.getLineAndCharacter().line;
|
|
||||||
const character = position.getLineAndCharacter().character;
|
|
||||||
|
|
||||||
console.error(`${name}:${line + 1}:${character + 1}:${failure.getFailure()}`);
|
|
||||||
}
|
|
||||||
errorCount += tslintResult.failures.length;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (errorCount > 0) {
|
|
||||||
this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
|
|
||||||
} else {
|
|
||||||
this.emit('end');
|
|
||||||
}
|
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -385,7 +293,7 @@ function createGitIndexVinyls(paths) {
|
|||||||
return e(err);
|
return e(err);
|
||||||
}
|
}
|
||||||
|
|
||||||
cp.exec(`git show ":${relativePath}"`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
|
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
|
||||||
if (err) {
|
if (err) {
|
||||||
return e(err);
|
return e(err);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,13 +6,22 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
const gulp = require('gulp');
|
const gulp = require('gulp');
|
||||||
|
const json = require('gulp-json-editor');
|
||||||
|
const buffer = require('gulp-buffer');
|
||||||
|
const filter = require('gulp-filter');
|
||||||
|
const es = require('event-stream');
|
||||||
|
const util = require('./lib/util');
|
||||||
|
const remote = require('gulp-remote-src');
|
||||||
|
const zip = require('gulp-vinyl-zip');
|
||||||
|
const assign = require('object-assign');
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const jeditor = require('gulp-json-editor');
|
const jeditor = require('gulp-json-editor');
|
||||||
const product = require('../product.json');
|
|
||||||
|
const pkg = require('../package.json');
|
||||||
|
|
||||||
gulp.task('mixin', function () {
|
gulp.task('mixin', function () {
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const updateUrl = process.env['SQLOPS_UPDATEURL'];
|
const updateUrl = process.env['SQLOPS_UPDATEURL'];
|
||||||
if (!updateUrl) {
|
if (!updateUrl) {
|
||||||
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
|
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
|
||||||
@@ -26,53 +35,19 @@ gulp.task('mixin', function () {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
|
// {{SQL CARBON EDIT}}
|
||||||
|
let serviceUrl = 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json';
|
||||||
|
if (quality === 'insider') {
|
||||||
|
serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
|
||||||
|
}
|
||||||
let newValues = {
|
let newValues = {
|
||||||
"nameShort": product.nameShort,
|
|
||||||
"nameLong": product.nameLong,
|
|
||||||
"applicationName": product.applicationName,
|
|
||||||
"dataFolderName": product.dataFolderName,
|
|
||||||
"win32MutexName": product.win32MutexName,
|
|
||||||
"win32DirName": product.win32DirName,
|
|
||||||
"win32NameVersion": product.win32NameVersion,
|
|
||||||
"win32RegValueName": product.win32RegValueName,
|
|
||||||
"win32AppId": product.win32AppId,
|
|
||||||
"win32x64AppId": product.win32x64AppId,
|
|
||||||
"win32UserAppId": product.win32UserAppId,
|
|
||||||
"win32x64UserAppId": product.win32x64UserAppId,
|
|
||||||
"win32AppUserModelId": product.win32AppUserModelId,
|
|
||||||
"win32ShellNameShort": product.win32ShellNameShort,
|
|
||||||
"darwinBundleIdentifier": product.darwinBundleIdentifier,
|
|
||||||
"updateUrl": updateUrl,
|
"updateUrl": updateUrl,
|
||||||
"quality": quality,
|
"quality": quality,
|
||||||
"extensionsGallery": {
|
"extensionsGallery": {
|
||||||
"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
|
"serviceUrl": serviceUrl
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
if (quality === 'insider') {
|
|
||||||
let dashSuffix = '-insiders';
|
|
||||||
let dotSuffix = '.insiders';
|
|
||||||
let displaySuffix = ' - Insiders';
|
|
||||||
|
|
||||||
newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
|
|
||||||
newValues.nameShort += dashSuffix;
|
|
||||||
newValues.nameLong += displaySuffix;
|
|
||||||
newValues.applicationName += dashSuffix;
|
|
||||||
newValues.dataFolderName += dashSuffix;
|
|
||||||
newValues.win32MutexName += dashSuffix;
|
|
||||||
newValues.win32DirName += displaySuffix;
|
|
||||||
newValues.win32NameVersion += displaySuffix;
|
|
||||||
newValues.win32RegValueName += dashSuffix;
|
|
||||||
newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
|
|
||||||
newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
|
|
||||||
newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
|
|
||||||
newValues.win32x64UserAppId += dashSuffix;
|
|
||||||
newValues.win32AppUserModelId += dotSuffix;
|
|
||||||
newValues.win32ShellNameShort += displaySuffix;
|
|
||||||
newValues.darwinBundleIdentifier += dotSuffix;
|
|
||||||
}
|
|
||||||
|
|
||||||
return gulp.src('./product.json')
|
return gulp.src('./product.json')
|
||||||
.pipe(jeditor(newValues))
|
.pipe(jeditor(newValues))
|
||||||
.pipe(gulp.dest('.'));
|
.pipe(gulp.dest('.'));
|
||||||
|
|||||||
@@ -1,16 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
const gulp = require('gulp');
|
|
||||||
|
|
||||||
const noop = () => { return Promise.resolve(); };
|
|
||||||
|
|
||||||
gulp.task('vscode-reh-win32-ia32-min', noop);
|
|
||||||
gulp.task('vscode-reh-win32-x64-min', noop);
|
|
||||||
gulp.task('vscode-reh-darwin-min', noop);
|
|
||||||
gulp.task('vscode-reh-linux-x64-min', noop);
|
|
||||||
gulp.task('vscode-reh-linux-arm-min', noop);
|
|
||||||
@@ -28,6 +28,7 @@ const formatFiles = (some) => {
|
|||||||
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
|
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
|
||||||
if (result.error) {
|
if (result.error) {
|
||||||
console.error(result.message);
|
console.error(result.message);
|
||||||
|
errorCount++;
|
||||||
}
|
}
|
||||||
cb(null, file);
|
cb(null, file);
|
||||||
|
|
||||||
@@ -39,7 +40,7 @@ const formatFiles = (some) => {
|
|||||||
.pipe(filter(f => !f.stat.isDirectory()))
|
.pipe(filter(f => !f.stat.isDirectory()))
|
||||||
.pipe(formatting);
|
.pipe(formatting);
|
||||||
|
|
||||||
};
|
}
|
||||||
|
|
||||||
const formatStagedFiles = () => {
|
const formatStagedFiles = () => {
|
||||||
const cp = require('child_process');
|
const cp = require('child_process');
|
||||||
@@ -80,4 +81,4 @@ const formatStagedFiles = () => {
|
|||||||
process.exit(1);
|
process.exit(1);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
};
|
}
|
||||||
15
build/gulpfile.test.js
Normal file
15
build/gulpfile.test.js
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
const gulp = require('gulp');
|
||||||
|
const mocha = require('gulp-mocha');
|
||||||
|
|
||||||
|
gulp.task('test', function () {
|
||||||
|
return gulp.src('test/all.js')
|
||||||
|
.pipe(mocha({ ui: 'tdd', delay: true }))
|
||||||
|
.once('end', function () { process.exit(); });
|
||||||
|
});
|
||||||
@@ -20,7 +20,6 @@ const filter = require('gulp-filter');
|
|||||||
const json = require('gulp-json-editor');
|
const json = require('gulp-json-editor');
|
||||||
const _ = require('underscore');
|
const _ = require('underscore');
|
||||||
const util = require('./lib/util');
|
const util = require('./lib/util');
|
||||||
const task = require('./lib/task');
|
|
||||||
const ext = require('./lib/extensions');
|
const ext = require('./lib/extensions');
|
||||||
const buildfile = require('../src/buildfile');
|
const buildfile = require('../src/buildfile');
|
||||||
const common = require('./lib/optimize');
|
const common = require('./lib/optimize');
|
||||||
@@ -33,17 +32,17 @@ const i18n = require('./lib/i18n');
|
|||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
|
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
|
||||||
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
|
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
|
||||||
// {{SQL CARBON EDIT}} - End
|
const glob = require('glob');
|
||||||
const deps = require('./dependencies');
|
const deps = require('./dependencies');
|
||||||
const getElectronVersion = require('./lib/electron').getElectronVersion;
|
const getElectronVersion = require('./lib/electron').getElectronVersion;
|
||||||
const createAsar = require('./lib/asar').createAsar;
|
const createAsar = require('./lib/asar').createAsar;
|
||||||
const { compileBuildTask } = require('./gulpfile.compile');
|
|
||||||
|
|
||||||
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
|
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
|
||||||
// @ts-ignore
|
// @ts-ignore
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
var del = require('del');
|
var del = require('del');
|
||||||
|
const extensionsRoot = path.join(root, 'extensions');
|
||||||
|
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
|
||||||
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
|
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const nodeModules = [
|
const nodeModules = [
|
||||||
@@ -52,12 +51,33 @@ const nodeModules = [
|
|||||||
'rxjs/Observable',
|
'rxjs/Observable',
|
||||||
'rxjs/Subject',
|
'rxjs/Subject',
|
||||||
'rxjs/Observer',
|
'rxjs/Observer',
|
||||||
'ng2-charts']
|
'ng2-charts/ng2-charts']
|
||||||
.concat(Object.keys(product.dependencies || {}))
|
.concat(Object.keys(product.dependencies || {}))
|
||||||
.concat(_.uniq(productionDependencies.map(d => d.name)))
|
.concat(_.uniq(productionDependencies.map(d => d.name)))
|
||||||
.concat(baseModules);
|
.concat(baseModules);
|
||||||
|
|
||||||
|
|
||||||
// Build
|
// Build
|
||||||
|
const builtInExtensions = require('./builtInExtensions.json');
|
||||||
|
|
||||||
|
const excludedExtensions = [
|
||||||
|
'vscode-api-tests',
|
||||||
|
'vscode-colorize-tests',
|
||||||
|
'ms-vscode.node-debug',
|
||||||
|
'ms-vscode.node-debug2',
|
||||||
|
];
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
const vsce = require('vsce');
|
||||||
|
const sqlBuiltInExtensions = [
|
||||||
|
// Add SQL built-in extensions here.
|
||||||
|
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
||||||
|
'agent',
|
||||||
|
'import',
|
||||||
|
'profiler'
|
||||||
|
];
|
||||||
|
var azureExtensions = [ 'azurecore'];
|
||||||
|
|
||||||
const vscodeEntryPoints = _.flatten([
|
const vscodeEntryPoints = _.flatten([
|
||||||
buildfile.entrypoint('vs/workbench/workbench.main'),
|
buildfile.entrypoint('vs/workbench/workbench.main'),
|
||||||
buildfile.base,
|
buildfile.base,
|
||||||
@@ -70,27 +90,22 @@ const vscodeResources = [
|
|||||||
'out-build/cli.js',
|
'out-build/cli.js',
|
||||||
'out-build/driver.js',
|
'out-build/driver.js',
|
||||||
'out-build/bootstrap.js',
|
'out-build/bootstrap.js',
|
||||||
'out-build/bootstrap-fork.js',
|
|
||||||
'out-build/bootstrap-amd.js',
|
'out-build/bootstrap-amd.js',
|
||||||
'out-build/bootstrap-window.js',
|
|
||||||
'out-build/paths.js',
|
'out-build/paths.js',
|
||||||
'out-build/vs/**/*.{svg,png,cur,html}',
|
'out-build/vs/**/*.{svg,png,cur,html}',
|
||||||
'!out-build/vs/code/browser/**/*.html',
|
|
||||||
'out-build/vs/base/common/performance.js',
|
'out-build/vs/base/common/performance.js',
|
||||||
'out-build/vs/base/node/languagePacks.js',
|
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}',
|
||||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
|
|
||||||
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
|
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
|
||||||
'out-build/vs/workbench/browser/media/*-theme.css',
|
'out-build/vs/workbench/browser/media/*-theme.css',
|
||||||
'out-build/vs/workbench/contrib/debug/**/*.json',
|
'out-build/vs/workbench/electron-browser/bootstrap/**',
|
||||||
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
|
'out-build/vs/workbench/parts/debug/**/*.json',
|
||||||
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
|
'out-build/vs/workbench/parts/execution/**/*.scpt',
|
||||||
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
|
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
|
||||||
'out-build/vs/**/markdown.css',
|
'out-build/vs/**/markdown.css',
|
||||||
'out-build/vs/workbench/contrib/tasks/**/*.json',
|
'out-build/vs/workbench/parts/tasks/**/*.json',
|
||||||
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
|
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
|
||||||
'out-build/vs/workbench/services/files/**/*.exe',
|
'out-build/vs/workbench/services/files/**/*.exe',
|
||||||
'out-build/vs/workbench/services/files/**/*.md',
|
'out-build/vs/workbench/services/files/**/*.md',
|
||||||
'out-build/vs/code/electron-browser/workbench/**',
|
|
||||||
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
||||||
'out-build/vs/code/electron-browser/issue/issueReporter.js',
|
'out-build/vs/code/electron-browser/issue/issueReporter.js',
|
||||||
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
|
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
|
||||||
@@ -102,17 +117,18 @@ const vscodeResources = [
|
|||||||
'out-build/sql/parts/admin/**/*.html',
|
'out-build/sql/parts/admin/**/*.html',
|
||||||
'out-build/sql/parts/connection/connectionDialog/media/*.{gif,png,svg}',
|
'out-build/sql/parts/connection/connectionDialog/media/*.{gif,png,svg}',
|
||||||
'out-build/sql/parts/common/dblist/**/*.html',
|
'out-build/sql/parts/common/dblist/**/*.html',
|
||||||
'out-build/sql/workbench/parts/dashboard/**/*.html',
|
'out-build/sql/parts/dashboard/**/*.html',
|
||||||
'out-build/sql/parts/disasterRecovery/**/*.html',
|
'out-build/sql/parts/disasterRecovery/**/*.html',
|
||||||
'out-build/sql/parts/common/modal/media/**',
|
'out-build/sql/parts/common/modal/media/**',
|
||||||
'out-build/sql/workbench/parts/grid/media/**',
|
'out-build/sql/parts/grid/load/lib/**',
|
||||||
'out-build/sql/workbench/parts/grid/views/**/*.html',
|
'out-build/sql/parts/grid/load/loadJquery.js',
|
||||||
|
'out-build/sql/parts/grid/media/**',
|
||||||
|
'out-build/sql/parts/grid/views/**/*.html',
|
||||||
'out-build/sql/parts/tasks/**/*.html',
|
'out-build/sql/parts/tasks/**/*.html',
|
||||||
'out-build/sql/parts/taskHistory/viewlet/media/**',
|
'out-build/sql/parts/taskHistory/viewlet/media/**',
|
||||||
'out-build/sql/parts/jobManagement/common/media/*.svg',
|
'out-build/sql/parts/jobManagement/common/media/*.svg',
|
||||||
'out-build/sql/media/objectTypes/*.svg',
|
'out-build/sql/media/objectTypes/*.svg',
|
||||||
'out-build/sql/media/icons/*.svg',
|
'out-build/sql/media/icons/*.svg',
|
||||||
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
|
|
||||||
'!**/test/**'
|
'!**/test/**'
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -122,84 +138,65 @@ const BUNDLED_FILE_HEADER = [
|
|||||||
' *--------------------------------------------------------*/'
|
' *--------------------------------------------------------*/'
|
||||||
].join('\n');
|
].join('\n');
|
||||||
|
|
||||||
const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
|
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
|
||||||
task.parallel(
|
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
|
||||||
util.rimraf('out-vscode'),
|
src: 'out-build',
|
||||||
compileBuildTask
|
entryPoints: vscodeEntryPoints,
|
||||||
),
|
otherSources: [],
|
||||||
common.optimizeTask({
|
resources: vscodeResources,
|
||||||
src: 'out-build',
|
loaderConfig: common.loaderConfig(nodeModules),
|
||||||
entryPoints: vscodeEntryPoints,
|
header: BUNDLED_FILE_HEADER,
|
||||||
resources: vscodeResources,
|
out: 'out-vscode',
|
||||||
loaderConfig: common.loaderConfig(nodeModules),
|
bundleInfo: undefined
|
||||||
header: BUNDLED_FILE_HEADER,
|
}));
|
||||||
out: 'out-vscode',
|
|
||||||
bundleInfo: undefined
|
|
||||||
})
|
|
||||||
));
|
|
||||||
|
|
||||||
|
|
||||||
const optimizeIndexJSTask = task.define('optimize-index-js', task.series(
|
gulp.task('optimize-index-js', ['optimize-vscode'], () => {
|
||||||
optimizeVSCodeTask,
|
const fullpath = path.join(process.cwd(), 'out-vscode/vs/workbench/electron-browser/bootstrap/index.js');
|
||||||
() => {
|
const contents = fs.readFileSync(fullpath).toString();
|
||||||
const fullpath = path.join(process.cwd(), 'out-vscode/bootstrap-window.js');
|
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
|
||||||
const contents = fs.readFileSync(fullpath).toString();
|
fs.writeFileSync(fullpath, newContents);
|
||||||
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
|
});
|
||||||
fs.writeFileSync(fullpath, newContents);
|
|
||||||
}
|
|
||||||
));
|
|
||||||
|
|
||||||
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
|
const baseUrl = `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`;
|
||||||
const minifyVSCodeTask = task.define('minify-vscode', task.series(
|
gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
|
||||||
task.parallel(
|
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
|
||||||
util.rimraf('out-vscode-min'),
|
|
||||||
optimizeIndexJSTask
|
|
||||||
),
|
|
||||||
common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`)
|
|
||||||
));
|
|
||||||
|
|
||||||
// Package
|
// Package
|
||||||
|
|
||||||
// @ts-ignore JSON checking: darwinCredits is optional
|
// @ts-ignore JSON checking: darwinCredits is optional
|
||||||
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
||||||
|
|
||||||
function darwinBundleDocumentType(extensions, icon) {
|
|
||||||
return {
|
|
||||||
name: product.nameLong + ' document',
|
|
||||||
role: 'Editor',
|
|
||||||
ostypes: ["TEXT", "utxt", "TUTX", "****"],
|
|
||||||
extensions: extensions,
|
|
||||||
iconFile: icon
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const config = {
|
const config = {
|
||||||
version: getElectronVersion(),
|
version: getElectronVersion(),
|
||||||
productAppName: product.nameLong,
|
productAppName: product.nameLong,
|
||||||
companyName: 'Microsoft Corporation',
|
companyName: 'Microsoft Corporation',
|
||||||
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
|
copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
|
||||||
darwinIcon: 'resources/darwin/code.icns',
|
darwinIcon: 'resources/darwin/code.icns',
|
||||||
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
||||||
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
||||||
darwinHelpBookFolder: 'VS Code HelpBook',
|
darwinHelpBookFolder: 'VS Code HelpBook',
|
||||||
darwinHelpBookName: 'VS Code HelpBook',
|
darwinHelpBookName: 'VS Code HelpBook',
|
||||||
darwinBundleDocumentTypes: [
|
darwinBundleDocumentTypes: [{
|
||||||
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
|
name: product.nameLong + ' document',
|
||||||
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
|
role: 'Editor',
|
||||||
],
|
ostypes: ["TEXT", "utxt", "TUTX", "****"],
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
extensions: ["csv", "json", "sqlplan", "sql", "xml"],
|
||||||
|
iconFile: 'resources/darwin/code_file.icns'
|
||||||
|
}],
|
||||||
darwinBundleURLTypes: [{
|
darwinBundleURLTypes: [{
|
||||||
role: 'Viewer',
|
role: 'Viewer',
|
||||||
name: product.nameLong,
|
name: product.nameLong,
|
||||||
urlSchemes: [product.urlProtocol]
|
urlSchemes: [product.urlProtocol]
|
||||||
}],
|
}],
|
||||||
darwinForceDarkModeSupport: true,
|
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
|
||||||
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
|
|
||||||
linuxExecutableName: product.applicationName,
|
linuxExecutableName: product.applicationName,
|
||||||
winIcon: 'resources/win32/code.ico',
|
winIcon: 'resources/win32/code.ico',
|
||||||
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
|
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
|
||||||
|
|
||||||
// @ts-ignore JSON checking: electronRepository is optional
|
// @ts-ignore JSON checking: electronRepository is optional
|
||||||
repo: product.electronRepository || undefined
|
repo: product.electronRepository || void 0
|
||||||
};
|
};
|
||||||
|
|
||||||
function getElectron(arch) {
|
function getElectron(arch) {
|
||||||
@@ -212,18 +209,18 @@ function getElectron(arch) {
|
|||||||
});
|
});
|
||||||
|
|
||||||
return gulp.src('package.json')
|
return gulp.src('package.json')
|
||||||
.pipe(json({ name: product.nameShort }))
|
.pipe(json({ name: product.nameShort }))
|
||||||
.pipe(electron(electronOpts))
|
.pipe(electron(electronOpts))
|
||||||
.pipe(filter(['**', '!**/app/package.json']))
|
.pipe(filter(['**', '!**/app/package.json']))
|
||||||
.pipe(vfs.dest('.build/electron'));
|
.pipe(vfs.dest('.build/electron'));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
|
gulp.task('clean-electron', util.rimraf('.build/electron'));
|
||||||
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
|
gulp.task('electron', ['clean-electron'], getElectron(process.arch));
|
||||||
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
|
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
|
||||||
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
|
gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));
|
||||||
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Compute checksums for some files.
|
* Compute checksums for some files.
|
||||||
@@ -259,35 +256,115 @@ function computeChecksum(filename) {
|
|||||||
return hash;
|
return hash;
|
||||||
}
|
}
|
||||||
|
|
||||||
function packageTask(platform, arch, sourceFolderName, destinationFolderName, opts) {
|
function packageBuiltInExtensions() {
|
||||||
opts = opts || {};
|
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||||
|
.map(manifestPath => {
|
||||||
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
|
const extensionName = path.basename(extensionPath);
|
||||||
|
return { name: extensionName, path: extensionPath };
|
||||||
|
})
|
||||||
|
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||||
|
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||||
|
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
||||||
|
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
||||||
|
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
|
||||||
|
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||||
|
vsce.createVSIX({
|
||||||
|
cwd: element.path,
|
||||||
|
packagePath: packagePath,
|
||||||
|
useYarn: true
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
function packageAzureCoreTask(platform, arch) {
|
||||||
|
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
||||||
|
if (platform === 'darwin') {
|
||||||
|
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', 'azurecore');
|
||||||
|
} else {
|
||||||
|
destination = path.join(destination, 'resources', 'app', 'extensions', 'azurecore');
|
||||||
|
}
|
||||||
|
|
||||||
const destination = path.join(path.dirname(root), destinationFolderName);
|
|
||||||
platform = platform || process.platform;
|
platform = platform || process.platform;
|
||||||
|
|
||||||
return () => {
|
return () => {
|
||||||
const out = sourceFolderName;
|
const root = path.resolve(path.join(__dirname, '..'));
|
||||||
|
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||||
|
.map(manifestPath => {
|
||||||
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
|
const extensionName = path.basename(extensionPath);
|
||||||
|
return { name: extensionName, path: extensionPath };
|
||||||
|
})
|
||||||
|
.filter(({ name }) => azureExtensions.indexOf(name) > -1);
|
||||||
|
|
||||||
|
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||||
|
return ext.fromLocal(extension.path);
|
||||||
|
}));
|
||||||
|
|
||||||
|
let result = localExtensions
|
||||||
|
.pipe(util.skipDirectories())
|
||||||
|
.pipe(util.fixWin32DirectoryPermissions())
|
||||||
|
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
||||||
|
|
||||||
|
return result.pipe(vfs.dest(destination));
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function packageTask(platform, arch, opts) {
|
||||||
|
opts = opts || {};
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
const destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
||||||
|
platform = platform || process.platform;
|
||||||
|
|
||||||
|
return () => {
|
||||||
|
const out = opts.minified ? 'out-vscode-min' : 'out-vscode';
|
||||||
|
|
||||||
const checksums = computeChecksums(out, [
|
const checksums = computeChecksums(out, [
|
||||||
'vs/workbench/workbench.main.js',
|
'vs/workbench/workbench.main.js',
|
||||||
'vs/workbench/workbench.main.css',
|
'vs/workbench/workbench.main.css',
|
||||||
'vs/code/electron-browser/workbench/workbench.html',
|
'vs/workbench/electron-browser/bootstrap/index.html',
|
||||||
'vs/code/electron-browser/workbench/workbench.js'
|
'vs/workbench/electron-browser/bootstrap/index.js',
|
||||||
|
'vs/workbench/electron-browser/bootstrap/preload.js'
|
||||||
]);
|
]);
|
||||||
|
|
||||||
const src = gulp.src(out + '/**', { base: '.' })
|
const src = gulp.src(out + '/**', { base: '.' })
|
||||||
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
|
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
|
||||||
.pipe(util.setExecutableBit(['**/*.sh']))
|
|
||||||
.pipe(filter(['**', '!**/*.js.map']));
|
|
||||||
|
|
||||||
const root = path.resolve(path.join(__dirname, '..'));
|
const root = path.resolve(path.join(__dirname, '..'));
|
||||||
|
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||||
|
.map(manifestPath => {
|
||||||
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
|
const extensionName = path.basename(extensionPath);
|
||||||
|
return { name: extensionName, path: extensionPath };
|
||||||
|
})
|
||||||
|
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||||
|
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
|
||||||
|
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
|
||||||
|
|
||||||
|
packageBuiltInExtensions();
|
||||||
|
|
||||||
|
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||||
|
return ext.fromLocal(extension.path)
|
||||||
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
|
}));
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
ext.packageBuiltInExtensions();
|
const extensionDepsSrc = [
|
||||||
|
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||||
|
];
|
||||||
|
|
||||||
const sources = es.merge(src, ext.packageExtensionsStream({
|
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
||||||
sourceMappingURLBase: sourceMappingURLBase
|
.pipe(filter(['**', '!**/package-lock.json']))
|
||||||
}));
|
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
|
||||||
|
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
|
||||||
|
|
||||||
|
const sources = es.merge(src, localExtensions, localExtensionDependencies)
|
||||||
|
.pipe(util.setExecutableBit(['**/*.sh']))
|
||||||
|
.pipe(filter(['**', '!**/*.js.map']));
|
||||||
|
|
||||||
let version = packageJson.version;
|
let version = packageJson.version;
|
||||||
// @ts-ignore JSON checking: quality is optional
|
// @ts-ignore JSON checking: quality is optional
|
||||||
@@ -299,15 +376,8 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
|
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
|
||||||
const packageJsonUpdates = { name, version };
|
|
||||||
|
|
||||||
// for linux url handling
|
|
||||||
if (platform === 'linux') {
|
|
||||||
packageJsonUpdates.desktopName = `${product.applicationName}-url-handler.desktop`;
|
|
||||||
}
|
|
||||||
|
|
||||||
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
|
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
|
||||||
.pipe(json(packageJsonUpdates));
|
.pipe(json({ name, version }));
|
||||||
|
|
||||||
const date = new Date().toISOString();
|
const date = new Date().toISOString();
|
||||||
const productJsonUpdate = { commit, date, checksums };
|
const productJsonUpdate = { commit, date, checksums };
|
||||||
@@ -319,13 +389,14 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
const productJsonStream = gulp.src(['product.json'], { base: '.' })
|
const productJsonStream = gulp.src(['product.json'], { base: '.' })
|
||||||
.pipe(json(productJsonUpdate));
|
.pipe(json(productJsonUpdate));
|
||||||
|
|
||||||
const license = gulp.src(['LICENSES.chromium.html', product.licenseFileName, 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.', allowEmpty: true });
|
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
|
||||||
|
|
||||||
|
const watermark = gulp.src(['resources/letterpress.svg', 'resources/letterpress-dark.svg', 'resources/letterpress-hc.svg'], { base: '.' });
|
||||||
|
|
||||||
// TODO the API should be copied to `out` during compile, not here
|
// TODO the API should be copied to `out` during compile, not here
|
||||||
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
|
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const dataApi = gulp.src('src/sql/azdata.d.ts').pipe(rename('out/sql/azdata.d.ts'));
|
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
|
||||||
const sqlopsAPI = gulp.src('src/sql/sqlops.d.ts').pipe(rename('out/sql/sqlops.d.ts'));
|
|
||||||
|
|
||||||
const depsSrc = [
|
const depsSrc = [
|
||||||
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||||
@@ -335,7 +406,29 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
|
|
||||||
const deps = gulp.src(depsSrc, { base: '.', dot: true })
|
const deps = gulp.src(depsSrc, { base: '.', dot: true })
|
||||||
.pipe(filter(['**', '!**/package-lock.json']))
|
.pipe(filter(['**', '!**/package-lock.json']))
|
||||||
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')))
|
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
|
||||||
|
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
|
||||||
|
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
|
||||||
|
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
|
||||||
|
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
.pipe(util.cleanNodeModule('chart.js', ['node_modules/**'], undefined))
|
||||||
|
.pipe(util.cleanNodeModule('emmet', ['node_modules/**'], undefined))
|
||||||
|
.pipe(util.cleanNodeModule('pty.js', ['build/**'], ['build/Release/**']))
|
||||||
|
.pipe(util.cleanNodeModule('jquery-ui', ['external/**', 'demos/**'], undefined))
|
||||||
|
.pipe(util.cleanNodeModule('core-js', ['**/**'], undefined))
|
||||||
|
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
|
||||||
|
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
|
||||||
|
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
|
||||||
|
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
|
||||||
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
|
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
@@ -345,33 +438,24 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
'node_modules/slickgrid/**/*.*',
|
'node_modules/slickgrid/**/*.*',
|
||||||
'node_modules/underscore/**/*.*',
|
'node_modules/underscore/**/*.*',
|
||||||
'node_modules/zone.js/**/*.*',
|
'node_modules/zone.js/**/*.*',
|
||||||
'node_modules/chart.js/**/*.*',
|
'node_modules/chart.js/**/*.*'
|
||||||
'node_modules/chartjs-color/**/*.*',
|
|
||||||
'node_modules/chartjs-color-string/**/*.*',
|
|
||||||
'node_modules/color-convert/**/*.*',
|
|
||||||
'node_modules/color-name/**/*.*',
|
|
||||||
'node_modules/moment/**/*.*'
|
|
||||||
], { base: '.', dot: true });
|
], { base: '.', dot: true });
|
||||||
|
|
||||||
let all = es.merge(
|
let all = es.merge(
|
||||||
packageJsonStream,
|
packageJsonStream,
|
||||||
productJsonStream,
|
productJsonStream,
|
||||||
license,
|
license,
|
||||||
|
watermark,
|
||||||
api,
|
api,
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
copiedModules,
|
copiedModules,
|
||||||
dataApi,
|
dataApi,
|
||||||
sqlopsAPI,
|
|
||||||
sources,
|
sources,
|
||||||
deps
|
deps
|
||||||
);
|
);
|
||||||
|
|
||||||
if (platform === 'win32') {
|
if (platform === 'win32') {
|
||||||
all = es.merge(all, gulp.src([
|
all = es.merge(all, gulp.src(['resources/win32/code_file.ico', 'resources/win32/code_70x70.png', 'resources/win32/code_150x150.png'], { base: '.' }));
|
||||||
// {{SQL CARBON EDIT}} remove unused icons
|
|
||||||
'resources/win32/code_70x70.png',
|
|
||||||
'resources/win32/code_150x150.png'
|
|
||||||
], { base: '.' }));
|
|
||||||
} else if (platform === 'linux') {
|
} else if (platform === 'linux') {
|
||||||
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
|
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
|
||||||
} else if (platform === 'darwin') {
|
} else if (platform === 'darwin') {
|
||||||
@@ -387,10 +471,8 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
|
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
|
||||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
||||||
|
|
||||||
// result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
|
|
||||||
|
|
||||||
if (platform === 'win32') {
|
if (platform === 'win32') {
|
||||||
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32', allowEmpty: true }));
|
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' }));
|
||||||
|
|
||||||
result = es.merge(result, gulp.src('resources/win32/bin/code.cmd', { base: 'resources/win32' })
|
result = es.merge(result, gulp.src('resources/win32/bin/code.cmd', { base: 'resources/win32' })
|
||||||
.pipe(replace('@@NAME@@', product.nameShort))
|
.pipe(replace('@@NAME@@', product.nameShort))
|
||||||
@@ -398,66 +480,47 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
|
|
||||||
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
|
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
|
||||||
.pipe(replace('@@NAME@@', product.nameShort))
|
.pipe(replace('@@NAME@@', product.nameShort))
|
||||||
.pipe(replace('@@PRODNAME@@', product.nameLong))
|
|
||||||
.pipe(replace('@@VERSION@@', version))
|
|
||||||
.pipe(replace('@@COMMIT@@', commit))
|
|
||||||
.pipe(replace('@@APPNAME@@', product.applicationName))
|
|
||||||
.pipe(replace('@@QUALITY@@', quality))
|
|
||||||
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
|
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
|
||||||
|
|
||||||
result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
|
result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
|
||||||
.pipe(rename(product.nameShort + '.VisualElementsManifest.xml')));
|
.pipe(rename(product.nameShort + '.VisualElementsManifest.xml')));
|
||||||
} else if (platform === 'linux') {
|
} else if (platform === 'linux') {
|
||||||
result = es.merge(result, gulp.src('resources/linux/bin/code.sh', { base: '.' })
|
result = es.merge(result, gulp.src('resources/linux/bin/code.sh', { base: '.' })
|
||||||
.pipe(replace('@@PRODNAME@@', product.nameLong))
|
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(rename('bin/' + product.applicationName)));
|
.pipe(rename('bin/' + product.applicationName)));
|
||||||
}
|
}
|
||||||
|
|
||||||
// submit all stats that have been collected
|
|
||||||
// during the build phase
|
|
||||||
if (opts.stats) {
|
|
||||||
result.on('end', () => {
|
|
||||||
const { submitAllStats } = require('./lib/stats');
|
|
||||||
submitAllStats(product, commit).then(() => console.log('Submitted bundle stats!'));
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return result.pipe(vfs.dest(destination));
|
return result.pipe(vfs.dest(destination));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const buildRoot = path.dirname(root);
|
const buildRoot = path.dirname(root);
|
||||||
|
|
||||||
const BUILD_TARGETS = [
|
// {{SQL CARBON EDIT}}
|
||||||
{ platform: 'win32', arch: 'ia32' },
|
gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('win32', 'x64'));
|
||||||
{ platform: 'win32', arch: 'x64' },
|
gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], packageAzureCoreTask('darwin'));
|
||||||
{ platform: 'darwin', arch: null, opts: { stats: true } },
|
gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('linux', 'x64'));
|
||||||
{ platform: 'linux', arch: 'ia32' },
|
|
||||||
{ platform: 'linux', arch: 'x64' },
|
|
||||||
{ platform: 'linux', arch: 'arm' },
|
|
||||||
{ platform: 'linux', arch: 'arm64' },
|
|
||||||
];
|
|
||||||
BUILD_TARGETS.forEach(buildTarget => {
|
|
||||||
const dashed = (str) => (str ? `-${str}` : ``);
|
|
||||||
const platform = buildTarget.platform;
|
|
||||||
const arch = buildTarget.arch;
|
|
||||||
const opts = buildTarget.opts;
|
|
||||||
|
|
||||||
['', 'min'].forEach(minified => {
|
gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-ia32')));
|
||||||
const sourceFolderName = `out-vscode${dashed(minified)}`;
|
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-x64')));
|
||||||
const destinationFolderName = `azuredatastudio${dashed(platform)}${dashed(arch)}`;
|
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'azuredatastudio-darwin')));
|
||||||
|
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-ia32')));
|
||||||
|
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-x64')));
|
||||||
|
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm')));
|
||||||
|
|
||||||
const vscodeTask = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}`, task.series(
|
gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
|
||||||
task.parallel(
|
gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
|
||||||
minified ? minifyVSCodeTask : optimizeVSCodeTask,
|
gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
|
||||||
util.rimraf(path.join(buildRoot, destinationFolderName))
|
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
|
||||||
),
|
gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
|
||||||
packageTask(platform, arch, sourceFolderName, destinationFolderName, opts)
|
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));
|
||||||
));
|
|
||||||
gulp.task(vscodeTask);
|
gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
|
||||||
});
|
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
|
||||||
});
|
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
|
||||||
|
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
|
||||||
|
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
|
||||||
|
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
|
||||||
|
|
||||||
// Transifex Localizations
|
// Transifex Localizations
|
||||||
|
|
||||||
@@ -480,78 +543,68 @@ const apiHostname = process.env.TRANSIFEX_API_URL;
|
|||||||
const apiName = process.env.TRANSIFEX_API_NAME;
|
const apiName = process.env.TRANSIFEX_API_NAME;
|
||||||
const apiToken = process.env.TRANSIFEX_API_TOKEN;
|
const apiToken = process.env.TRANSIFEX_API_TOKEN;
|
||||||
|
|
||||||
gulp.task(task.define(
|
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
|
||||||
'vscode-translations-push',
|
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||||
task.series(
|
const pathToExtensions = './extensions/*';
|
||||||
optimizeVSCodeTask,
|
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||||
function () {
|
|
||||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
|
||||||
const pathToExtensions = './extensions/*';
|
|
||||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
|
||||||
|
|
||||||
return es.merge(
|
return es.merge(
|
||||||
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
||||||
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
||||||
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
||||||
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
|
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
|
||||||
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
|
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
|
||||||
}
|
});
|
||||||
)
|
|
||||||
));
|
|
||||||
|
|
||||||
gulp.task(task.define(
|
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
|
||||||
'vscode-translations-export',
|
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||||
task.series(
|
const pathToExtensions = './extensions/*';
|
||||||
optimizeVSCodeTask,
|
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||||
function () {
|
|
||||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
|
||||||
const pathToExtensions = './extensions/*';
|
|
||||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
|
||||||
|
|
||||||
return es.merge(
|
return es.merge(
|
||||||
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
||||||
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
||||||
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
||||||
).pipe(vfs.dest('../vscode-translations-export'));
|
// {{SQL CARBON EDIT}}
|
||||||
}
|
// disable since function makes calls to VS Code Transifex API
|
||||||
)
|
// ).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
|
||||||
));
|
).pipe(vfs.dest('../vscode-transifex-input'));
|
||||||
|
});
|
||||||
|
|
||||||
gulp.task('vscode-translations-pull', function () {
|
gulp.task('vscode-translations-pull', function () {
|
||||||
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
|
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
|
||||||
|
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
|
||||||
|
|
||||||
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
|
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
|
||||||
return i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-translations-import/${language.id}/setup`));
|
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
|
||||||
}));
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
gulp.task('vscode-translations-import', function () {
|
gulp.task('vscode-translations-import', function () {
|
||||||
// {{SQL CARBON EDIT}} - Replace function body with our own
|
|
||||||
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
|
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
|
||||||
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
|
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
|
||||||
.pipe(i18n.prepareI18nFiles())
|
.pipe(i18n.prepareI18nFiles())
|
||||||
.pipe(vfs.dest(`./i18n/${language.folderName}`));
|
.pipe(vfs.dest(`./i18n/${language.folderName}`));
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
|
||||||
|
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
|
||||||
|
// .pipe(vfs.dest(`./build/win32/i18n`));
|
||||||
});
|
});
|
||||||
// {{SQL CARBON EDIT}} - End
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Sourcemaps
|
// Sourcemaps
|
||||||
|
|
||||||
gulp.task('upload-vscode-sourcemaps', () => {
|
gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
|
||||||
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
|
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
|
||||||
.pipe(es.mapSync(f => {
|
.pipe(es.mapSync(f => {
|
||||||
f.path = `${f.base}/core/${f.relative}`;
|
f.path = `${f.base}/core/${f.relative}`;
|
||||||
return f;
|
return f;
|
||||||
}));
|
}));
|
||||||
|
|
||||||
const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' });
|
const extensions = gulp.src('extensions/**/out/**/*.map', { base: '.' });
|
||||||
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
|
|
||||||
|
|
||||||
return es.merge(vs, extensionsOut, extensionsDist)
|
return es.merge(vs, extensions)
|
||||||
.pipe(es.through(function (data) {
|
|
||||||
// debug
|
|
||||||
console.log('Uploading Sourcemap', data.relative);
|
|
||||||
this.emit('data', data);
|
|
||||||
}))
|
|
||||||
.pipe(azure.upload({
|
.pipe(azure.upload({
|
||||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||||
@@ -560,8 +613,57 @@ gulp.task('upload-vscode-sourcemaps', () => {
|
|||||||
}));
|
}));
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
|
||||||
|
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
|
||||||
|
if (!shouldSetupSettingsSearch()) {
|
||||||
|
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||||
|
console.log(`Only runs on master and release branches, not ${branch}`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!fs.existsSync(allConfigDetailsPath)) {
|
||||||
|
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
|
||||||
|
if (!settingsSearchBuildId) {
|
||||||
|
throw new Error('Failed to compute build number');
|
||||||
|
}
|
||||||
|
|
||||||
|
return gulp.src(allConfigDetailsPath)
|
||||||
|
.pipe(azure.upload({
|
||||||
|
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||||
|
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||||
|
container: 'configuration',
|
||||||
|
prefix: `${settingsSearchBuildId}/${commit}/`
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
|
||||||
|
function shouldSetupSettingsSearch() {
|
||||||
|
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||||
|
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSettingsSearchBuildId(packageJson) {
|
||||||
|
try {
|
||||||
|
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||||
|
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
|
||||||
|
/\/master$/.test(branch) ? 1 :
|
||||||
|
2; // Some unexpected branch
|
||||||
|
|
||||||
|
const out = cp.execSync(`git rev-list HEAD --count`);
|
||||||
|
const count = parseInt(out.toString());
|
||||||
|
|
||||||
|
// <version number><commit count><branchId (avoid unlikely conflicts)>
|
||||||
|
// 1.25.1, 1,234,567 commits, master = 1250112345671
|
||||||
|
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
|
||||||
|
} catch (e) {
|
||||||
|
throw new Error('Could not determine build number: ' + e.toString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// This task is only run for the MacOS build
|
// This task is only run for the MacOS build
|
||||||
const generateVSCodeConfigurationTask = task.define('generate-vscode-configuration', () => {
|
gulp.task('generate-vscode-configuration', () => {
|
||||||
return new Promise((resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||||
if (!buildDir) {
|
if (!buildDir) {
|
||||||
@@ -598,61 +700,6 @@ const generateVSCodeConfigurationTask = task.define('generate-vscode-configurati
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
|
|
||||||
gulp.task(task.define(
|
|
||||||
'upload-vscode-configuration',
|
|
||||||
task.series(
|
|
||||||
generateVSCodeConfigurationTask,
|
|
||||||
() => {
|
|
||||||
if (!shouldSetupSettingsSearch()) {
|
|
||||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
|
||||||
console.log(`Only runs on master and release branches, not ${branch}`);
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!fs.existsSync(allConfigDetailsPath)) {
|
|
||||||
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
|
|
||||||
if (!settingsSearchBuildId) {
|
|
||||||
throw new Error('Failed to compute build number');
|
|
||||||
}
|
|
||||||
|
|
||||||
return gulp.src(allConfigDetailsPath)
|
|
||||||
.pipe(azure.upload({
|
|
||||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
|
||||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
|
||||||
container: 'configuration',
|
|
||||||
prefix: `${settingsSearchBuildId}/${commit}/`
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
)
|
|
||||||
));
|
|
||||||
|
|
||||||
function shouldSetupSettingsSearch() {
|
|
||||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
|
||||||
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSettingsSearchBuildId(packageJson) {
|
|
||||||
try {
|
|
||||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
|
||||||
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
|
|
||||||
/\/master$/.test(branch) ? 1 :
|
|
||||||
2; // Some unexpected branch
|
|
||||||
|
|
||||||
const out = cp.execSync(`git rev-list HEAD --count`);
|
|
||||||
const count = parseInt(out.toString());
|
|
||||||
|
|
||||||
// <version number><commit count><branchId (avoid unlikely conflicts)>
|
|
||||||
// 1.25.1, 1,234,567 commits, master = 1250112345671
|
|
||||||
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
|
|
||||||
} catch (e) {
|
|
||||||
throw new Error('Could not determine build number: ' + e.toString());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
// Install service locally before building carbon
|
// Install service locally before building carbon
|
||||||
|
|
||||||
@@ -675,28 +722,6 @@ function installService() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
gulp.task('install-sqltoolsservice', () => {
|
gulp.task('install-sqltoolsservice', () => {
|
||||||
return installService();
|
return installService();
|
||||||
});
|
});
|
||||||
|
|
||||||
function installSsmsMin() {
|
|
||||||
const config = require('../extensions/admin-tool-ext-win/src/config.json');
|
|
||||||
return platformInfo.getCurrent().then(p => {
|
|
||||||
const runtime = p.runtimeId;
|
|
||||||
// fix path since it won't be correct
|
|
||||||
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
|
|
||||||
var installer = new serviceDownloader(config);
|
|
||||||
const serviceInstallFolder = installer.getInstallDirectory(runtime);
|
|
||||||
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
|
|
||||||
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
|
|
||||||
return del(serviceCleanupFolder + '/*').then(() => {
|
|
||||||
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
|
|
||||||
return installer.installService(runtime);
|
|
||||||
}, delError => {
|
|
||||||
console.log('failed to delete the install folder error: ' + delError);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
gulp.task('install-ssmsmin', () => {
|
|
||||||
return installSsmsMin();
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -12,39 +12,29 @@ const shell = require('gulp-shell');
|
|||||||
const es = require('event-stream');
|
const es = require('event-stream');
|
||||||
const vfs = require('vinyl-fs');
|
const vfs = require('vinyl-fs');
|
||||||
const util = require('./lib/util');
|
const util = require('./lib/util');
|
||||||
const task = require('./lib/task');
|
|
||||||
const packageJson = require('../package.json');
|
const packageJson = require('../package.json');
|
||||||
const product = require('../product.json');
|
const product = require('../product.json');
|
||||||
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
|
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
|
||||||
const path = require('path');
|
|
||||||
const root = path.dirname(__dirname);
|
|
||||||
const commit = util.getVersion(root);
|
|
||||||
|
|
||||||
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
|
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
|
||||||
|
|
||||||
function getDebPackageArch(arch) {
|
function getDebPackageArch(arch) {
|
||||||
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
|
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
|
||||||
}
|
}
|
||||||
|
|
||||||
function prepareDebPackage(arch) {
|
function prepareDebPackage(arch) {
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const binaryDir = '../azuredatastudio-linux-' + arch;
|
const binaryDir = '../azuredatastudio-linux-' + arch;
|
||||||
const debArch = getDebPackageArch(arch);
|
const debArch = getDebPackageArch(arch);
|
||||||
const destination = '.build/linux/deb/' + debArch + '/' + product.applicationName + '-' + debArch;
|
const destination = '.build/linux/deb/' + debArch + '/' + product.applicationName + '-' + debArch;
|
||||||
|
|
||||||
return function () {
|
return function () {
|
||||||
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||||
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
|
|
||||||
|
|
||||||
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
|
||||||
.pipe(rename('usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
|
|
||||||
|
|
||||||
const desktops = es.merge(desktop, desktopUrlHandler)
|
|
||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
.pipe(replace('@@ICON@@', product.applicationName))
|
||||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
|
||||||
|
|
||||||
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
@@ -53,13 +43,7 @@ function prepareDebPackage(arch) {
|
|||||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||||
|
|
||||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||||
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
.pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png'));
|
||||||
|
|
||||||
// const bash_completion = gulp.src('resources/completions/bash/code')
|
|
||||||
// .pipe(rename('usr/share/bash-completion/completions/code'));
|
|
||||||
|
|
||||||
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
|
|
||||||
// .pipe(rename('usr/share/zsh/vendor-completions/_code'));
|
|
||||||
|
|
||||||
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||||
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
|
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
|
||||||
@@ -95,7 +79,7 @@ function prepareDebPackage(arch) {
|
|||||||
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
||||||
.pipe(rename('DEBIAN/postinst'));
|
.pipe(rename('DEBIAN/postinst'));
|
||||||
|
|
||||||
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code);
|
const all = es.merge(control, postinst, postrm, prerm, desktop, appdata, icon, code);
|
||||||
|
|
||||||
return all.pipe(vfs.dest(destination));
|
return all.pipe(vfs.dest(destination));
|
||||||
};
|
};
|
||||||
@@ -115,7 +99,7 @@ function getRpmBuildPath(rpmArch) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function getRpmPackageArch(arch) {
|
function getRpmPackageArch(arch) {
|
||||||
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
|
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf' }[arch];
|
||||||
}
|
}
|
||||||
|
|
||||||
function prepareRpmPackage(arch) {
|
function prepareRpmPackage(arch) {
|
||||||
@@ -125,17 +109,11 @@ function prepareRpmPackage(arch) {
|
|||||||
|
|
||||||
return function () {
|
return function () {
|
||||||
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||||
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
|
|
||||||
|
|
||||||
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
|
||||||
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
|
|
||||||
|
|
||||||
const desktops = es.merge(desktop, desktopUrlHandler)
|
|
||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
.pipe(replace('@@ICON@@', product.applicationName))
|
||||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
|
||||||
|
|
||||||
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
@@ -144,13 +122,7 @@ function prepareRpmPackage(arch) {
|
|||||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||||
|
|
||||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||||
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
.pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png'));
|
||||||
|
|
||||||
// const bash_completion = gulp.src('resources/completions/bash/code')
|
|
||||||
// .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
|
|
||||||
|
|
||||||
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
|
|
||||||
// .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
|
|
||||||
|
|
||||||
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||||
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
|
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
|
||||||
@@ -158,7 +130,6 @@ function prepareRpmPackage(arch) {
|
|||||||
const spec = gulp.src('resources/linux/rpm/code.spec.template', { base: '.' })
|
const spec = gulp.src('resources/linux/rpm/code.spec.template', { base: '.' })
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
|
||||||
.pipe(replace('@@VERSION@@', packageJson.version))
|
.pipe(replace('@@VERSION@@', packageJson.version))
|
||||||
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
|
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
|
||||||
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
|
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
|
||||||
@@ -173,7 +144,7 @@ function prepareRpmPackage(arch) {
|
|||||||
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
|
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
|
||||||
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
|
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
|
||||||
|
|
||||||
const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon);
|
const all = es.merge(code, desktop, appdata, icon, spec, specIcon);
|
||||||
|
|
||||||
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
|
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
|
||||||
};
|
};
|
||||||
@@ -191,45 +162,37 @@ function buildRpmPackage(arch) {
|
|||||||
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
|
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
function getSnapBuildPath(arch) {
|
function getSnapBuildPath(arch) {
|
||||||
return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`;
|
return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`;
|
||||||
}
|
}
|
||||||
|
|
||||||
function prepareSnapPackage(arch) {
|
function prepareSnapPackage(arch) {
|
||||||
// {{SQL CARBON EDIT}}
|
const binaryDir = '../VSCode-linux-' + arch;
|
||||||
const binaryDir = '../azuredatastudio-linux-' + arch;
|
|
||||||
const destination = getSnapBuildPath(arch);
|
const destination = getSnapBuildPath(arch);
|
||||||
|
|
||||||
return function () {
|
return function () {
|
||||||
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||||
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
|
|
||||||
|
|
||||||
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
|
||||||
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
|
|
||||||
|
|
||||||
const desktops = es.merge(desktop, desktopUrlHandler)
|
|
||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
|
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.applicationName}.png`))
|
||||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
|
||||||
|
|
||||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||||
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
|
.pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`));
|
||||||
|
|
||||||
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||||
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
|
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
|
||||||
|
|
||||||
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
|
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(replace('@@VERSION@@', commit.substr(0, 8)))
|
.pipe(replace('@@VERSION@@', packageJson.version))
|
||||||
.pipe(rename('snap/snapcraft.yaml'));
|
.pipe(rename('snap/snapcraft.yaml'));
|
||||||
|
|
||||||
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
|
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
|
||||||
.pipe(rename('electron-launch'));
|
.pipe(rename('electron-launch'));
|
||||||
|
|
||||||
const all = es.merge(desktops, icon, code, snapcraft, electronLaunch);
|
const all = es.merge(desktop, icon, code, snapcraft, electronLaunch);
|
||||||
|
|
||||||
return all.pipe(vfs.dest(destination));
|
return all.pipe(vfs.dest(destination));
|
||||||
};
|
};
|
||||||
@@ -237,39 +200,117 @@ function prepareSnapPackage(arch) {
|
|||||||
|
|
||||||
function buildSnapPackage(arch) {
|
function buildSnapPackage(arch) {
|
||||||
const snapBuildPath = getSnapBuildPath(arch);
|
const snapBuildPath = getSnapBuildPath(arch);
|
||||||
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
|
const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
|
||||||
|
return shell.task([
|
||||||
|
`chmod +x ${snapBuildPath}/electron-launch`,
|
||||||
|
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
|
||||||
|
]);
|
||||||
}
|
}
|
||||||
|
|
||||||
const BUILD_TARGETS = [
|
function getFlatpakArch(arch) {
|
||||||
{ arch: 'ia32' },
|
return { x64: 'x86_64', ia32: 'i386', arm: 'arm' }[arch];
|
||||||
{ arch: 'x64' },
|
}
|
||||||
{ arch: 'arm' },
|
|
||||||
{ arch: 'arm64' },
|
|
||||||
];
|
|
||||||
|
|
||||||
BUILD_TARGETS.forEach((buildTarget) => {
|
function prepareFlatpak(arch) {
|
||||||
const arch = buildTarget.arch;
|
// {{SQL CARBON EDIT}}
|
||||||
|
const binaryDir = '../azuredatastudio-linux-' + arch;
|
||||||
|
const flatpakArch = getFlatpakArch(arch);
|
||||||
|
const destination = '.build/linux/flatpak/' + flatpakArch;
|
||||||
|
|
||||||
{
|
return function () {
|
||||||
const debArch = getDebPackageArch(arch);
|
// This is not imported in the global scope to avoid requiring ImageMagick
|
||||||
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
|
// (or GraphicsMagick) when not building building Flatpak bundles.
|
||||||
// gulp.task(prepareDebTask);
|
const imgResize = require('gulp-image-resize');
|
||||||
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
|
|
||||||
gulp.task(buildDebTask);
|
const all = [16, 24, 32, 48, 64, 128, 192, 256, 512].map(function (size) {
|
||||||
|
return gulp.src('resources/linux/code.png', { base: '.' })
|
||||||
|
.pipe(imgResize({ width: size, height: size, format: "png", noProfile: true }))
|
||||||
|
.pipe(rename('share/icons/hicolor/' + size + 'x' + size + '/apps/' + flatpakManifest.appId + '.png'));
|
||||||
|
});
|
||||||
|
|
||||||
|
all.push(gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||||
|
.pipe(replace('Exec=/usr/share/@@NAME@@/@@NAME@@', 'Exec=' + product.applicationName))
|
||||||
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
|
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||||
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
|
.pipe(rename('share/applications/' + flatpakManifest.appId + '.desktop')));
|
||||||
|
|
||||||
|
all.push(gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
||||||
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
|
.pipe(replace('@@NAME@@', flatpakManifest.appId))
|
||||||
|
.pipe(replace('@@LICENSE@@', product.licenseName))
|
||||||
|
.pipe(rename('share/appdata/' + flatpakManifest.appId + '.appdata.xml')));
|
||||||
|
|
||||||
|
all.push(gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||||
|
.pipe(rename(function (p) {
|
||||||
|
p.dirname = 'share/' + product.applicationName + '/' + p.dirname;
|
||||||
|
})));
|
||||||
|
|
||||||
|
return es.merge(all).pipe(vfs.dest(destination));
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildFlatpak(arch) {
|
||||||
|
const flatpakArch = getFlatpakArch(arch);
|
||||||
|
const manifest = {};
|
||||||
|
for (var k in flatpakManifest) {
|
||||||
|
manifest[k] = flatpakManifest[k];
|
||||||
}
|
}
|
||||||
|
manifest.files = [
|
||||||
{
|
['.build/linux/flatpak/' + flatpakArch, '/'],
|
||||||
const rpmArch = getRpmPackageArch(arch);
|
];
|
||||||
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
|
const buildOptions = {
|
||||||
// gulp.task(prepareRpmTask);
|
arch: flatpakArch,
|
||||||
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
|
subject: product.nameLong + ' ' + packageJson.version + '.' + linuxPackageRevision,
|
||||||
gulp.task(buildRpmTask);
|
};
|
||||||
|
// If requested, use the configured path for the OSTree repository.
|
||||||
|
if (process.env.FLATPAK_REPO) {
|
||||||
|
buildOptions.repoDir = process.env.FLATPAK_REPO;
|
||||||
|
} else {
|
||||||
|
buildOptions.bundlePath = manifest.appId + '-' + flatpakArch + '.flatpak';
|
||||||
}
|
}
|
||||||
|
// Setup PGP signing if requested.
|
||||||
{
|
if (process.env.GPG_KEY_ID !== undefined) {
|
||||||
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
|
buildOptions.gpgSign = process.env.GPG_KEY_ID;
|
||||||
gulp.task(prepareSnapTask);
|
if (process.env.GPG_HOMEDIR) {
|
||||||
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
|
buildOptions.gpgHomedir = process.env.GPG_HOME_DIR;
|
||||||
gulp.task(buildSnapTask);
|
}
|
||||||
}
|
}
|
||||||
});
|
return function (cb) {
|
||||||
|
require('flatpak-bundler').bundle(manifest, buildOptions, cb);
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
|
||||||
|
gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64'));
|
||||||
|
gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf'));
|
||||||
|
gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
|
||||||
|
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
|
||||||
|
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
|
||||||
|
gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64'));
|
||||||
|
gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64'));
|
||||||
|
gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64'));
|
||||||
|
gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i386'));
|
||||||
|
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
|
||||||
|
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));
|
||||||
|
|
||||||
|
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
|
||||||
|
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
|
||||||
|
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
|
||||||
|
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
|
||||||
|
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
|
||||||
|
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
|
||||||
|
|
||||||
|
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
|
||||||
|
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
|
||||||
|
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
|
||||||
|
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
|
||||||
|
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
|
||||||
|
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
|
||||||
|
|
||||||
|
gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32'));
|
||||||
|
gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64'));
|
||||||
|
gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm'));
|
||||||
|
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
|
||||||
|
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
|
||||||
|
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
|
||||||
|
|||||||
@@ -12,11 +12,9 @@ const assert = require('assert');
|
|||||||
const cp = require('child_process');
|
const cp = require('child_process');
|
||||||
const _7z = require('7zip')['7z'];
|
const _7z = require('7zip')['7z'];
|
||||||
const util = require('./lib/util');
|
const util = require('./lib/util');
|
||||||
const task = require('./lib/task');
|
|
||||||
const pkg = require('../package.json');
|
const pkg = require('../package.json');
|
||||||
const product = require('../product.json');
|
const product = require('../product.json');
|
||||||
const vfs = require('vinyl-fs');
|
const vfs = require('vinyl-fs');
|
||||||
const rcedit = require('rcedit');
|
|
||||||
const mkdirp = require('mkdirp');
|
const mkdirp = require('mkdirp');
|
||||||
|
|
||||||
const repoPath = path.dirname(__dirname);
|
const repoPath = path.dirname(__dirname);
|
||||||
@@ -27,21 +25,18 @@ const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
|
|||||||
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
|
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
|
||||||
const issPath = path.join(__dirname, 'win32', 'code.iss');
|
const issPath = path.join(__dirname, 'win32', 'code.iss');
|
||||||
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
|
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
|
||||||
// const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
|
const signPS1 = path.join(repoPath, 'build', 'tfs', 'win32', 'sign.ps1');
|
||||||
|
|
||||||
function packageInnoSetup(iss, options, cb) {
|
function packageInnoSetup(iss, options, cb) {
|
||||||
options = options || {};
|
options = options || {};
|
||||||
|
|
||||||
const definitions = options.definitions || {};
|
const definitions = options.definitions || {};
|
||||||
|
const debug = process.argv.some(arg => arg === '--debug-inno');
|
||||||
|
|
||||||
if (process.argv.some(arg => arg === '--debug-inno')) {
|
if (debug) {
|
||||||
definitions['Debug'] = 'true';
|
definitions['Debug'] = 'true';
|
||||||
}
|
}
|
||||||
|
|
||||||
if (process.argv.some(arg => arg === '--sign')) {
|
|
||||||
definitions['Sign'] = 'true';
|
|
||||||
}
|
|
||||||
|
|
||||||
const keys = Object.keys(definitions);
|
const keys = Object.keys(definitions);
|
||||||
|
|
||||||
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
|
keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
|
||||||
@@ -108,8 +103,8 @@ function buildWin32Setup(arch, target) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function defineWin32SetupTasks(arch, target) {
|
function defineWin32SetupTasks(arch, target) {
|
||||||
const cleanTask = util.rimraf(setupDir(arch, target));
|
gulp.task(`clean-vscode-win32-${arch}-${target}-setup`, util.rimraf(setupDir(arch, target)));
|
||||||
gulp.task(task.define(`vscode-win32-${arch}-${target}-setup`, task.series(cleanTask, buildWin32Setup(arch, target))));
|
gulp.task(`vscode-win32-${arch}-${target}-setup`, [`clean-vscode-win32-${arch}-${target}-setup`], buildWin32Setup(arch, target));
|
||||||
}
|
}
|
||||||
|
|
||||||
defineWin32SetupTasks('ia32', 'system');
|
defineWin32SetupTasks('ia32', 'system');
|
||||||
@@ -127,8 +122,11 @@ function archiveWin32Setup(arch) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
gulp.task(task.define('vscode-win32-ia32-archive', task.series(util.rimraf(zipDir('ia32')), archiveWin32Setup('ia32'))));
|
gulp.task('clean-vscode-win32-ia32-archive', util.rimraf(zipDir('ia32')));
|
||||||
gulp.task(task.define('vscode-win32-x64-archive', task.series(util.rimraf(zipDir('x64')), archiveWin32Setup('x64'))));
|
gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], archiveWin32Setup('ia32'));
|
||||||
|
|
||||||
|
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
|
||||||
|
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));
|
||||||
|
|
||||||
function copyInnoUpdater(arch) {
|
function copyInnoUpdater(arch) {
|
||||||
return () => {
|
return () => {
|
||||||
@@ -137,12 +135,5 @@ function copyInnoUpdater(arch) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function patchInnoUpdater(arch) {
|
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
|
||||||
return cb => {
|
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
|
||||||
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
|
|
||||||
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
gulp.task(task.define('vscode-win32-ia32-inno-updater', task.series(copyInnoUpdater('ia32'), patchInnoUpdater('ia32'))));
|
|
||||||
gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdater('x64'), patchInnoUpdater('x64'))));
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"module": "commonjs",
|
|
||||||
"target": "es2017",
|
|
||||||
"jsx": "preserve",
|
|
||||||
"checkJs": true
|
|
||||||
},
|
|
||||||
"include": [
|
|
||||||
"**/*.js"
|
|
||||||
],
|
|
||||||
"exclude": [
|
|
||||||
"node_modules",
|
|
||||||
"**/node_modules/*"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
@@ -4,33 +4,33 @@
|
|||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
'use strict';
|
'use strict';
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const path = require("path");
|
var path = require("path");
|
||||||
const es = require("event-stream");
|
var es = require("event-stream");
|
||||||
const pickle = require('chromium-pickle-js');
|
var pickle = require("chromium-pickle-js");
|
||||||
const Filesystem = require('asar/lib/filesystem');
|
var Filesystem = require("asar/lib/filesystem");
|
||||||
const VinylFile = require("vinyl");
|
var VinylFile = require("vinyl");
|
||||||
const minimatch = require("minimatch");
|
var minimatch = require("minimatch");
|
||||||
function createAsar(folderPath, unpackGlobs, destFilename) {
|
function createAsar(folderPath, unpackGlobs, destFilename) {
|
||||||
const shouldUnpackFile = (file) => {
|
var shouldUnpackFile = function (file) {
|
||||||
for (let i = 0; i < unpackGlobs.length; i++) {
|
for (var i = 0; i < unpackGlobs.length; i++) {
|
||||||
if (minimatch(file.relative, unpackGlobs[i])) {
|
if (minimatch(file.relative, unpackGlobs[i])) {
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
const filesystem = new Filesystem(folderPath);
|
var filesystem = new Filesystem(folderPath);
|
||||||
const out = [];
|
var out = [];
|
||||||
// Keep track of pending inserts
|
// Keep track of pending inserts
|
||||||
let pendingInserts = 0;
|
var pendingInserts = 0;
|
||||||
let onFileInserted = () => { pendingInserts--; };
|
var onFileInserted = function () { pendingInserts--; };
|
||||||
// Do not insert twice the same directory
|
// Do not insert twice the same directory
|
||||||
const seenDir = {};
|
var seenDir = {};
|
||||||
const insertDirectoryRecursive = (dir) => {
|
var insertDirectoryRecursive = function (dir) {
|
||||||
if (seenDir[dir]) {
|
if (seenDir[dir]) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
let lastSlash = dir.lastIndexOf('/');
|
var lastSlash = dir.lastIndexOf('/');
|
||||||
if (lastSlash === -1) {
|
if (lastSlash === -1) {
|
||||||
lastSlash = dir.lastIndexOf('\\');
|
lastSlash = dir.lastIndexOf('\\');
|
||||||
}
|
}
|
||||||
@@ -40,8 +40,8 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
|
|||||||
seenDir[dir] = true;
|
seenDir[dir] = true;
|
||||||
filesystem.insertDirectory(dir);
|
filesystem.insertDirectory(dir);
|
||||||
};
|
};
|
||||||
const insertDirectoryForFile = (file) => {
|
var insertDirectoryForFile = function (file) {
|
||||||
let lastSlash = file.lastIndexOf('/');
|
var lastSlash = file.lastIndexOf('/');
|
||||||
if (lastSlash === -1) {
|
if (lastSlash === -1) {
|
||||||
lastSlash = file.lastIndexOf('\\');
|
lastSlash = file.lastIndexOf('\\');
|
||||||
}
|
}
|
||||||
@@ -49,7 +49,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
|
|||||||
insertDirectoryRecursive(file.substring(0, lastSlash));
|
insertDirectoryRecursive(file.substring(0, lastSlash));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
const insertFile = (relativePath, stat, shouldUnpack) => {
|
var insertFile = function (relativePath, stat, shouldUnpack) {
|
||||||
insertDirectoryForFile(relativePath);
|
insertDirectoryForFile(relativePath);
|
||||||
pendingInserts++;
|
pendingInserts++;
|
||||||
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
|
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
|
||||||
@@ -59,13 +59,13 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (!file.stat.isFile()) {
|
if (!file.stat.isFile()) {
|
||||||
throw new Error(`unknown item in stream!`);
|
throw new Error("unknown item in stream!");
|
||||||
}
|
}
|
||||||
const shouldUnpack = shouldUnpackFile(file);
|
var shouldUnpack = shouldUnpackFile(file);
|
||||||
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
|
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
|
||||||
if (shouldUnpack) {
|
if (shouldUnpack) {
|
||||||
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
|
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
|
||||||
const relative = path.relative(folderPath, file.path);
|
var relative = path.relative(folderPath, file.path);
|
||||||
this.queue(new VinylFile({
|
this.queue(new VinylFile({
|
||||||
cwd: folderPath,
|
cwd: folderPath,
|
||||||
base: folderPath,
|
base: folderPath,
|
||||||
@@ -79,33 +79,34 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
|
|||||||
out.push(file.contents);
|
out.push(file.contents);
|
||||||
}
|
}
|
||||||
}, function () {
|
}, function () {
|
||||||
let finish = () => {
|
var _this = this;
|
||||||
|
var finish = function () {
|
||||||
{
|
{
|
||||||
const headerPickle = pickle.createEmpty();
|
var headerPickle = pickle.createEmpty();
|
||||||
headerPickle.writeString(JSON.stringify(filesystem.header));
|
headerPickle.writeString(JSON.stringify(filesystem.header));
|
||||||
const headerBuf = headerPickle.toBuffer();
|
var headerBuf = headerPickle.toBuffer();
|
||||||
const sizePickle = pickle.createEmpty();
|
var sizePickle = pickle.createEmpty();
|
||||||
sizePickle.writeUInt32(headerBuf.length);
|
sizePickle.writeUInt32(headerBuf.length);
|
||||||
const sizeBuf = sizePickle.toBuffer();
|
var sizeBuf = sizePickle.toBuffer();
|
||||||
out.unshift(headerBuf);
|
out.unshift(headerBuf);
|
||||||
out.unshift(sizeBuf);
|
out.unshift(sizeBuf);
|
||||||
}
|
}
|
||||||
const contents = Buffer.concat(out);
|
var contents = Buffer.concat(out);
|
||||||
out.length = 0;
|
out.length = 0;
|
||||||
this.queue(new VinylFile({
|
_this.queue(new VinylFile({
|
||||||
cwd: folderPath,
|
cwd: folderPath,
|
||||||
base: folderPath,
|
base: folderPath,
|
||||||
path: destFilename,
|
path: destFilename,
|
||||||
contents: contents
|
contents: contents
|
||||||
}));
|
}));
|
||||||
this.queue(null);
|
_this.queue(null);
|
||||||
};
|
};
|
||||||
// Call finish() only when all file inserts have finished...
|
// Call finish() only when all file inserts have finished...
|
||||||
if (pendingInserts === 0) {
|
if (pendingInserts === 0) {
|
||||||
finish();
|
finish();
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
onFileInserted = () => {
|
onFileInserted = function () {
|
||||||
pendingInserts--;
|
pendingInserts--;
|
||||||
if (pendingInserts === 0) {
|
if (pendingInserts === 0) {
|
||||||
finish();
|
finish();
|
||||||
|
|||||||
@@ -7,8 +7,8 @@
|
|||||||
|
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as es from 'event-stream';
|
import * as es from 'event-stream';
|
||||||
const pickle = require('chromium-pickle-js');
|
import * as pickle from 'chromium-pickle-js';
|
||||||
const Filesystem = require('asar/lib/filesystem');
|
import * as Filesystem from 'asar/lib/filesystem';
|
||||||
import * as VinylFile from 'vinyl';
|
import * as VinylFile from 'vinyl';
|
||||||
import * as minimatch from 'minimatch';
|
import * as minimatch from 'minimatch';
|
||||||
|
|
||||||
|
|||||||
@@ -14,8 +14,7 @@ const es = require('event-stream');
|
|||||||
const rename = require('gulp-rename');
|
const rename = require('gulp-rename');
|
||||||
const vfs = require('vinyl-fs');
|
const vfs = require('vinyl-fs');
|
||||||
const ext = require('./extensions');
|
const ext = require('./extensions');
|
||||||
const fancyLog = require('fancy-log');
|
const util = require('gulp-util');
|
||||||
const ansiColors = require('ansi-colors');
|
|
||||||
|
|
||||||
const root = path.dirname(path.dirname(__dirname));
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
const builtInExtensions = require('../builtInExtensions.json');
|
const builtInExtensions = require('../builtInExtensions.json');
|
||||||
@@ -44,22 +43,22 @@ function isUpToDate(extension) {
|
|||||||
|
|
||||||
function syncMarketplaceExtension(extension) {
|
function syncMarketplaceExtension(extension) {
|
||||||
if (isUpToDate(extension)) {
|
if (isUpToDate(extension)) {
|
||||||
fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
|
util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
}
|
}
|
||||||
|
|
||||||
rimraf.sync(getExtensionPath(extension));
|
rimraf.sync(getExtensionPath(extension));
|
||||||
|
|
||||||
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
|
return ext.fromMarketplace(extension.name, extension.version)
|
||||||
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
||||||
.pipe(vfs.dest('.build/builtInExtensions'))
|
.pipe(vfs.dest('.build/builtInExtensions'))
|
||||||
.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
|
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
|
||||||
}
|
}
|
||||||
|
|
||||||
function syncExtension(extension, controlState) {
|
function syncExtension(extension, controlState) {
|
||||||
switch (controlState) {
|
switch (controlState) {
|
||||||
case 'disabled':
|
case 'disabled':
|
||||||
fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
|
util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
|
|
||||||
case 'marketplace':
|
case 'marketplace':
|
||||||
@@ -67,15 +66,15 @@ function syncExtension(extension, controlState) {
|
|||||||
|
|
||||||
default:
|
default:
|
||||||
if (!fs.existsSync(controlState)) {
|
if (!fs.existsSync(controlState)) {
|
||||||
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
|
|
||||||
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
||||||
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
}
|
}
|
||||||
|
|
||||||
fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
|
util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -94,8 +93,8 @@ function writeControlFile(control) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function main() {
|
function main() {
|
||||||
fancyLog('Syncronizing built-in extensions...');
|
util.log('Syncronizing built-in extensions...');
|
||||||
fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
|
util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);
|
||||||
|
|
||||||
const control = readControlFile();
|
const control = readControlFile();
|
||||||
const streams = [];
|
const streams = [];
|
||||||
|
|||||||
@@ -4,19 +4,19 @@
|
|||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const fs = require("fs");
|
var fs = require("fs");
|
||||||
const path = require("path");
|
var path = require("path");
|
||||||
const vm = require("vm");
|
var vm = require("vm");
|
||||||
/**
|
/**
|
||||||
* Bundle `entryPoints` given config `config`.
|
* Bundle `entryPoints` given config `config`.
|
||||||
*/
|
*/
|
||||||
function bundle(entryPoints, config, callback) {
|
function bundle(entryPoints, config, callback) {
|
||||||
const entryPointsMap = {};
|
var entryPointsMap = {};
|
||||||
entryPoints.forEach((module) => {
|
entryPoints.forEach(function (module) {
|
||||||
entryPointsMap[module.name] = module;
|
entryPointsMap[module.name] = module;
|
||||||
});
|
});
|
||||||
const allMentionedModulesMap = {};
|
var allMentionedModulesMap = {};
|
||||||
entryPoints.forEach((module) => {
|
entryPoints.forEach(function (module) {
|
||||||
allMentionedModulesMap[module.name] = true;
|
allMentionedModulesMap[module.name] = true;
|
||||||
(module.include || []).forEach(function (includedModule) {
|
(module.include || []).forEach(function (includedModule) {
|
||||||
allMentionedModulesMap[includedModule] = true;
|
allMentionedModulesMap[includedModule] = true;
|
||||||
@@ -25,30 +25,26 @@ function bundle(entryPoints, config, callback) {
|
|||||||
allMentionedModulesMap[excludedModule] = true;
|
allMentionedModulesMap[excludedModule] = true;
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
||||||
const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
||||||
const loaderModule = { exports: {} };
|
var loaderModule = { exports: {} };
|
||||||
r.call({}, require, loaderModule, loaderModule.exports);
|
r.call({}, require, loaderModule, loaderModule.exports);
|
||||||
const loader = loaderModule.exports;
|
var loader = loaderModule.exports;
|
||||||
config.isBuild = true;
|
config.isBuild = true;
|
||||||
config.paths = config.paths || {};
|
config.paths = config.paths || {};
|
||||||
if (!config.paths['vs/nls']) {
|
config.paths['vs/nls'] = 'out-build/vs/nls.build';
|
||||||
config.paths['vs/nls'] = 'out-build/vs/nls.build';
|
config.paths['vs/css'] = 'out-build/vs/css.build';
|
||||||
}
|
|
||||||
if (!config.paths['vs/css']) {
|
|
||||||
config.paths['vs/css'] = 'out-build/vs/css.build';
|
|
||||||
}
|
|
||||||
loader.config(config);
|
loader.config(config);
|
||||||
loader(['require'], (localRequire) => {
|
loader(['require'], function (localRequire) {
|
||||||
const resolvePath = (path) => {
|
var resolvePath = function (path) {
|
||||||
const r = localRequire.toUrl(path);
|
var r = localRequire.toUrl(path);
|
||||||
if (!/\.js/.test(r)) {
|
if (!/\.js/.test(r)) {
|
||||||
return r + '.js';
|
return r + '.js';
|
||||||
}
|
}
|
||||||
return r;
|
return r;
|
||||||
};
|
};
|
||||||
for (const moduleId in entryPointsMap) {
|
for (var moduleId in entryPointsMap) {
|
||||||
const entryPoint = entryPointsMap[moduleId];
|
var entryPoint = entryPointsMap[moduleId];
|
||||||
if (entryPoint.append) {
|
if (entryPoint.append) {
|
||||||
entryPoint.append = entryPoint.append.map(resolvePath);
|
entryPoint.append = entryPoint.append.map(resolvePath);
|
||||||
}
|
}
|
||||||
@@ -57,59 +53,59 @@ function bundle(entryPoints, config, callback) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
loader(Object.keys(allMentionedModulesMap), () => {
|
loader(Object.keys(allMentionedModulesMap), function () {
|
||||||
const modules = loader.getBuildInfo();
|
var modules = loader.getBuildInfo();
|
||||||
const partialResult = emitEntryPoints(modules, entryPointsMap);
|
var partialResult = emitEntryPoints(modules, entryPointsMap);
|
||||||
const cssInlinedResources = loader('vs/css').getInlinedResources();
|
var cssInlinedResources = loader('vs/css').getInlinedResources();
|
||||||
callback(null, {
|
callback(null, {
|
||||||
files: partialResult.files,
|
files: partialResult.files,
|
||||||
cssInlinedResources: cssInlinedResources,
|
cssInlinedResources: cssInlinedResources,
|
||||||
bundleData: partialResult.bundleData
|
bundleData: partialResult.bundleData
|
||||||
});
|
});
|
||||||
}, (err) => callback(err, null));
|
}, function (err) { return callback(err, null); });
|
||||||
}
|
}
|
||||||
exports.bundle = bundle;
|
exports.bundle = bundle;
|
||||||
function emitEntryPoints(modules, entryPoints) {
|
function emitEntryPoints(modules, entryPoints) {
|
||||||
const modulesMap = {};
|
var modulesMap = {};
|
||||||
modules.forEach((m) => {
|
modules.forEach(function (m) {
|
||||||
modulesMap[m.id] = m;
|
modulesMap[m.id] = m;
|
||||||
});
|
});
|
||||||
const modulesGraph = {};
|
var modulesGraph = {};
|
||||||
modules.forEach((m) => {
|
modules.forEach(function (m) {
|
||||||
modulesGraph[m.id] = m.dependencies;
|
modulesGraph[m.id] = m.dependencies;
|
||||||
});
|
});
|
||||||
const sortedModules = topologicalSort(modulesGraph);
|
var sortedModules = topologicalSort(modulesGraph);
|
||||||
let result = [];
|
var result = [];
|
||||||
const usedPlugins = {};
|
var usedPlugins = {};
|
||||||
const bundleData = {
|
var bundleData = {
|
||||||
graph: modulesGraph,
|
graph: modulesGraph,
|
||||||
bundles: {}
|
bundles: {}
|
||||||
};
|
};
|
||||||
Object.keys(entryPoints).forEach((moduleToBundle) => {
|
Object.keys(entryPoints).forEach(function (moduleToBundle) {
|
||||||
const info = entryPoints[moduleToBundle];
|
var info = entryPoints[moduleToBundle];
|
||||||
const rootNodes = [moduleToBundle].concat(info.include || []);
|
var rootNodes = [moduleToBundle].concat(info.include || []);
|
||||||
const allDependencies = visit(rootNodes, modulesGraph);
|
var allDependencies = visit(rootNodes, modulesGraph);
|
||||||
const excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
|
var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
|
||||||
excludes.forEach((excludeRoot) => {
|
excludes.forEach(function (excludeRoot) {
|
||||||
const allExcludes = visit([excludeRoot], modulesGraph);
|
var allExcludes = visit([excludeRoot], modulesGraph);
|
||||||
Object.keys(allExcludes).forEach((exclude) => {
|
Object.keys(allExcludes).forEach(function (exclude) {
|
||||||
delete allDependencies[exclude];
|
delete allDependencies[exclude];
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
const includedModules = sortedModules.filter((module) => {
|
var includedModules = sortedModules.filter(function (module) {
|
||||||
return allDependencies[module];
|
return allDependencies[module];
|
||||||
});
|
});
|
||||||
bundleData.bundles[moduleToBundle] = includedModules;
|
bundleData.bundles[moduleToBundle] = includedModules;
|
||||||
const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest);
|
var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
|
||||||
result = result.concat(res.files);
|
result = result.concat(res.files);
|
||||||
for (const pluginName in res.usedPlugins) {
|
for (var pluginName in res.usedPlugins) {
|
||||||
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
|
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
Object.keys(usedPlugins).forEach((pluginName) => {
|
Object.keys(usedPlugins).forEach(function (pluginName) {
|
||||||
const plugin = usedPlugins[pluginName];
|
var plugin = usedPlugins[pluginName];
|
||||||
if (typeof plugin.finishBuild === 'function') {
|
if (typeof plugin.finishBuild === 'function') {
|
||||||
const write = (filename, contents) => {
|
var write = function (filename, contents) {
|
||||||
result.push({
|
result.push({
|
||||||
dest: filename,
|
dest: filename,
|
||||||
sources: [{
|
sources: [{
|
||||||
@@ -128,16 +124,16 @@ function emitEntryPoints(modules, entryPoints) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
function extractStrings(destFiles) {
|
function extractStrings(destFiles) {
|
||||||
const parseDefineCall = (moduleMatch, depsMatch) => {
|
var parseDefineCall = function (moduleMatch, depsMatch) {
|
||||||
const module = moduleMatch.replace(/^"|"$/g, '');
|
var module = moduleMatch.replace(/^"|"$/g, '');
|
||||||
let deps = depsMatch.split(',');
|
var deps = depsMatch.split(',');
|
||||||
deps = deps.map((dep) => {
|
deps = deps.map(function (dep) {
|
||||||
dep = dep.trim();
|
dep = dep.trim();
|
||||||
dep = dep.replace(/^"|"$/g, '');
|
dep = dep.replace(/^"|"$/g, '');
|
||||||
dep = dep.replace(/^'|'$/g, '');
|
dep = dep.replace(/^'|'$/g, '');
|
||||||
let prefix = null;
|
var prefix = null;
|
||||||
let _path = null;
|
var _path = null;
|
||||||
const pieces = dep.split('!');
|
var pieces = dep.split('!');
|
||||||
if (pieces.length > 1) {
|
if (pieces.length > 1) {
|
||||||
prefix = pieces[0] + '!';
|
prefix = pieces[0] + '!';
|
||||||
_path = pieces[1];
|
_path = pieces[1];
|
||||||
@@ -147,7 +143,7 @@ function extractStrings(destFiles) {
|
|||||||
_path = pieces[0];
|
_path = pieces[0];
|
||||||
}
|
}
|
||||||
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
|
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
|
||||||
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
||||||
return prefix + res;
|
return prefix + res;
|
||||||
}
|
}
|
||||||
return prefix + _path;
|
return prefix + _path;
|
||||||
@@ -157,7 +153,7 @@ function extractStrings(destFiles) {
|
|||||||
deps: deps
|
deps: deps
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
destFiles.forEach((destFile) => {
|
destFiles.forEach(function (destFile, index) {
|
||||||
if (!/\.js$/.test(destFile.dest)) {
|
if (!/\.js$/.test(destFile.dest)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -165,44 +161,44 @@ function extractStrings(destFiles) {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
// Do one pass to record the usage counts for each module id
|
// Do one pass to record the usage counts for each module id
|
||||||
const useCounts = {};
|
var useCounts = {};
|
||||||
destFile.sources.forEach((source) => {
|
destFile.sources.forEach(function (source) {
|
||||||
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
||||||
if (!matches) {
|
if (!matches) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const defineCall = parseDefineCall(matches[1], matches[2]);
|
var defineCall = parseDefineCall(matches[1], matches[2]);
|
||||||
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
||||||
defineCall.deps.forEach((dep) => {
|
defineCall.deps.forEach(function (dep) {
|
||||||
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
const sortedByUseModules = Object.keys(useCounts);
|
var sortedByUseModules = Object.keys(useCounts);
|
||||||
sortedByUseModules.sort((a, b) => {
|
sortedByUseModules.sort(function (a, b) {
|
||||||
return useCounts[b] - useCounts[a];
|
return useCounts[b] - useCounts[a];
|
||||||
});
|
});
|
||||||
const replacementMap = {};
|
var replacementMap = {};
|
||||||
sortedByUseModules.forEach((module, index) => {
|
sortedByUseModules.forEach(function (module, index) {
|
||||||
replacementMap[module] = index;
|
replacementMap[module] = index;
|
||||||
});
|
});
|
||||||
destFile.sources.forEach((source) => {
|
destFile.sources.forEach(function (source) {
|
||||||
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
|
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
|
||||||
const defineCall = parseDefineCall(moduleMatch, depsMatch);
|
var defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||||
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
|
return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
destFile.sources.unshift({
|
destFile.sources.unshift({
|
||||||
path: null,
|
path: null,
|
||||||
contents: [
|
contents: [
|
||||||
'(function() {',
|
'(function() {',
|
||||||
`var __m = ${JSON.stringify(sortedByUseModules)};`,
|
"var __m = " + JSON.stringify(sortedByUseModules) + ";",
|
||||||
`var __M = function(deps) {`,
|
"var __M = function(deps) {",
|
||||||
` var result = [];`,
|
" var result = [];",
|
||||||
` for (var i = 0, len = deps.length; i < len; i++) {`,
|
" for (var i = 0, len = deps.length; i < len; i++) {",
|
||||||
` result[i] = __m[deps[i]];`,
|
" result[i] = __m[deps[i]];",
|
||||||
` }`,
|
" }",
|
||||||
` return result;`,
|
" return result;",
|
||||||
`};`
|
"};"
|
||||||
].join('\n')
|
].join('\n')
|
||||||
});
|
});
|
||||||
destFile.sources.push({
|
destFile.sources.push({
|
||||||
@@ -214,7 +210,7 @@ function extractStrings(destFiles) {
|
|||||||
}
|
}
|
||||||
function removeDuplicateTSBoilerplate(destFiles) {
|
function removeDuplicateTSBoilerplate(destFiles) {
|
||||||
// Taken from typescript compiler => emitFiles
|
// Taken from typescript compiler => emitFiles
|
||||||
const BOILERPLATE = [
|
var BOILERPLATE = [
|
||||||
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
||||||
{ start: /^var __assign/, end: /^};$/ },
|
{ start: /^var __assign/, end: /^};$/ },
|
||||||
{ start: /^var __decorate/, end: /^};$/ },
|
{ start: /^var __decorate/, end: /^};$/ },
|
||||||
@@ -223,14 +219,14 @@ function removeDuplicateTSBoilerplate(destFiles) {
|
|||||||
{ start: /^var __awaiter/, end: /^};$/ },
|
{ start: /^var __awaiter/, end: /^};$/ },
|
||||||
{ start: /^var __generator/, end: /^};$/ },
|
{ start: /^var __generator/, end: /^};$/ },
|
||||||
];
|
];
|
||||||
destFiles.forEach((destFile) => {
|
destFiles.forEach(function (destFile) {
|
||||||
const SEEN_BOILERPLATE = [];
|
var SEEN_BOILERPLATE = [];
|
||||||
destFile.sources.forEach((source) => {
|
destFile.sources.forEach(function (source) {
|
||||||
const lines = source.contents.split(/\r\n|\n|\r/);
|
var lines = source.contents.split(/\r\n|\n|\r/);
|
||||||
const newLines = [];
|
var newLines = [];
|
||||||
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
|
var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
|
||||||
for (let i = 0; i < lines.length; i++) {
|
for (var i = 0; i < lines.length; i++) {
|
||||||
const line = lines[i];
|
var line = lines[i];
|
||||||
if (IS_REMOVING_BOILERPLATE) {
|
if (IS_REMOVING_BOILERPLATE) {
|
||||||
newLines.push('');
|
newLines.push('');
|
||||||
if (END_BOILERPLATE.test(line)) {
|
if (END_BOILERPLATE.test(line)) {
|
||||||
@@ -238,8 +234,8 @@ function removeDuplicateTSBoilerplate(destFiles) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
for (let j = 0; j < BOILERPLATE.length; j++) {
|
for (var j = 0; j < BOILERPLATE.length; j++) {
|
||||||
const boilerplate = BOILERPLATE[j];
|
var boilerplate = BOILERPLATE[j];
|
||||||
if (boilerplate.start.test(line)) {
|
if (boilerplate.start.test(line)) {
|
||||||
if (SEEN_BOILERPLATE[j]) {
|
if (SEEN_BOILERPLATE[j]) {
|
||||||
IS_REMOVING_BOILERPLATE = true;
|
IS_REMOVING_BOILERPLATE = true;
|
||||||
@@ -267,45 +263,45 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
|
|||||||
if (!dest) {
|
if (!dest) {
|
||||||
dest = entryPoint + '.js';
|
dest = entryPoint + '.js';
|
||||||
}
|
}
|
||||||
const mainResult = {
|
var mainResult = {
|
||||||
sources: [],
|
sources: [],
|
||||||
dest: dest
|
dest: dest
|
||||||
}, results = [mainResult];
|
}, results = [mainResult];
|
||||||
const usedPlugins = {};
|
var usedPlugins = {};
|
||||||
const getLoaderPlugin = (pluginName) => {
|
var getLoaderPlugin = function (pluginName) {
|
||||||
if (!usedPlugins[pluginName]) {
|
if (!usedPlugins[pluginName]) {
|
||||||
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
||||||
}
|
}
|
||||||
return usedPlugins[pluginName];
|
return usedPlugins[pluginName];
|
||||||
};
|
};
|
||||||
includedModules.forEach((c) => {
|
includedModules.forEach(function (c) {
|
||||||
const bangIndex = c.indexOf('!');
|
var bangIndex = c.indexOf('!');
|
||||||
if (bangIndex >= 0) {
|
if (bangIndex >= 0) {
|
||||||
const pluginName = c.substr(0, bangIndex);
|
var pluginName = c.substr(0, bangIndex);
|
||||||
const plugin = getLoaderPlugin(pluginName);
|
var plugin = getLoaderPlugin(pluginName);
|
||||||
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const module = modulesMap[c];
|
var module = modulesMap[c];
|
||||||
if (module.path === 'empty:') {
|
if (module.path === 'empty:') {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const contents = readFileAndRemoveBOM(module.path);
|
var contents = readFileAndRemoveBOM(module.path);
|
||||||
if (module.shim) {
|
if (module.shim) {
|
||||||
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
|
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
Object.keys(usedPlugins).forEach((pluginName) => {
|
Object.keys(usedPlugins).forEach(function (pluginName) {
|
||||||
const plugin = usedPlugins[pluginName];
|
var plugin = usedPlugins[pluginName];
|
||||||
if (typeof plugin.writeFile === 'function') {
|
if (typeof plugin.writeFile === 'function') {
|
||||||
const req = (() => {
|
var req = (function () {
|
||||||
throw new Error('no-no!');
|
throw new Error('no-no!');
|
||||||
});
|
});
|
||||||
req.toUrl = something => something;
|
req.toUrl = function (something) { return something; };
|
||||||
const write = (filename, contents) => {
|
var write = function (filename, contents) {
|
||||||
results.push({
|
results.push({
|
||||||
dest: filename,
|
dest: filename,
|
||||||
sources: [{
|
sources: [{
|
||||||
@@ -317,15 +313,15 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
|
|||||||
plugin.writeFile(pluginName, entryPoint, req, write, {});
|
plugin.writeFile(pluginName, entryPoint, req, write, {});
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
const toIFile = (path) => {
|
var toIFile = function (path) {
|
||||||
const contents = readFileAndRemoveBOM(path);
|
var contents = readFileAndRemoveBOM(path);
|
||||||
return {
|
return {
|
||||||
path: path,
|
path: path,
|
||||||
contents: contents
|
contents: contents
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
const toPrepend = (prepend || []).map(toIFile);
|
var toPrepend = (prepend || []).map(toIFile);
|
||||||
const toAppend = (append || []).map(toIFile);
|
var toAppend = (append || []).map(toIFile);
|
||||||
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
||||||
return {
|
return {
|
||||||
files: results,
|
files: results,
|
||||||
@@ -333,8 +329,8 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
function readFileAndRemoveBOM(path) {
|
function readFileAndRemoveBOM(path) {
|
||||||
const BOM_CHAR_CODE = 65279;
|
var BOM_CHAR_CODE = 65279;
|
||||||
let contents = fs.readFileSync(path, 'utf8');
|
var contents = fs.readFileSync(path, 'utf8');
|
||||||
// Remove BOM
|
// Remove BOM
|
||||||
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
||||||
contents = contents.substring(1);
|
contents = contents.substring(1);
|
||||||
@@ -342,15 +338,15 @@ function readFileAndRemoveBOM(path) {
|
|||||||
return contents;
|
return contents;
|
||||||
}
|
}
|
||||||
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
|
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
|
||||||
let result = '';
|
var result = '';
|
||||||
if (typeof plugin.write === 'function') {
|
if (typeof plugin.write === 'function') {
|
||||||
const write = ((what) => {
|
var write = (function (what) {
|
||||||
result += what;
|
result += what;
|
||||||
});
|
});
|
||||||
write.getEntryPoint = () => {
|
write.getEntryPoint = function () {
|
||||||
return entryPoint;
|
return entryPoint;
|
||||||
};
|
};
|
||||||
write.asModule = (moduleId, code) => {
|
write.asModule = function (moduleId, code) {
|
||||||
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
|
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
|
||||||
result += code;
|
result += code;
|
||||||
};
|
};
|
||||||
@@ -361,20 +357,20 @@ function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
|
|||||||
contents: result
|
contents: result
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
function emitNamedModule(moduleId, defineCallPosition, path, contents) {
|
function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
|
||||||
// `defineCallPosition` is the position in code: |define()
|
// `defineCallPosition` is the position in code: |define()
|
||||||
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||||
// `parensOffset` is the position in code: define|()
|
// `parensOffset` is the position in code: define|()
|
||||||
const parensOffset = contents.indexOf('(', defineCallOffset);
|
var parensOffset = contents.indexOf('(', defineCallOffset);
|
||||||
const insertStr = '"' + moduleId + '", ';
|
var insertStr = '"' + moduleId + '", ';
|
||||||
return {
|
return {
|
||||||
path: path,
|
path: path,
|
||||||
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
|
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
|
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
|
||||||
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||||
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||||
return {
|
return {
|
||||||
path: path,
|
path: path,
|
||||||
contents: contents + '\n;\n' + strDefine
|
contents: contents + '\n;\n' + strDefine
|
||||||
@@ -387,8 +383,7 @@ function positionToOffset(str, desiredLine, desiredCol) {
|
|||||||
if (desiredLine === 1) {
|
if (desiredLine === 1) {
|
||||||
return desiredCol - 1;
|
return desiredCol - 1;
|
||||||
}
|
}
|
||||||
let line = 1;
|
var line = 1, lastNewLineOffset = -1;
|
||||||
let lastNewLineOffset = -1;
|
|
||||||
do {
|
do {
|
||||||
if (desiredLine === line) {
|
if (desiredLine === line) {
|
||||||
return lastNewLineOffset + 1 + desiredCol - 1;
|
return lastNewLineOffset + 1 + desiredCol - 1;
|
||||||
@@ -402,15 +397,14 @@ function positionToOffset(str, desiredLine, desiredCol) {
|
|||||||
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
||||||
*/
|
*/
|
||||||
function visit(rootNodes, graph) {
|
function visit(rootNodes, graph) {
|
||||||
const result = {};
|
var result = {}, queue = rootNodes;
|
||||||
const queue = rootNodes;
|
rootNodes.forEach(function (node) {
|
||||||
rootNodes.forEach((node) => {
|
|
||||||
result[node] = true;
|
result[node] = true;
|
||||||
});
|
});
|
||||||
while (queue.length > 0) {
|
while (queue.length > 0) {
|
||||||
const el = queue.shift();
|
var el = queue.shift();
|
||||||
const myEdges = graph[el] || [];
|
var myEdges = graph[el] || [];
|
||||||
myEdges.forEach((toNode) => {
|
myEdges.forEach(function (toNode) {
|
||||||
if (!result[toNode]) {
|
if (!result[toNode]) {
|
||||||
result[toNode] = true;
|
result[toNode] = true;
|
||||||
queue.push(toNode);
|
queue.push(toNode);
|
||||||
@@ -423,11 +417,11 @@ function visit(rootNodes, graph) {
|
|||||||
* Perform a topological sort on `graph`
|
* Perform a topological sort on `graph`
|
||||||
*/
|
*/
|
||||||
function topologicalSort(graph) {
|
function topologicalSort(graph) {
|
||||||
const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
|
var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
|
||||||
Object.keys(graph).forEach((fromNode) => {
|
Object.keys(graph).forEach(function (fromNode) {
|
||||||
allNodes[fromNode] = true;
|
allNodes[fromNode] = true;
|
||||||
outgoingEdgeCount[fromNode] = graph[fromNode].length;
|
outgoingEdgeCount[fromNode] = graph[fromNode].length;
|
||||||
graph[fromNode].forEach((toNode) => {
|
graph[fromNode].forEach(function (toNode) {
|
||||||
allNodes[toNode] = true;
|
allNodes[toNode] = true;
|
||||||
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
|
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
|
||||||
inverseEdges[toNode] = inverseEdges[toNode] || [];
|
inverseEdges[toNode] = inverseEdges[toNode] || [];
|
||||||
@@ -435,8 +429,8 @@ function topologicalSort(graph) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
// https://en.wikipedia.org/wiki/Topological_sorting
|
// https://en.wikipedia.org/wiki/Topological_sorting
|
||||||
const S = [], L = [];
|
var S = [], L = [];
|
||||||
Object.keys(allNodes).forEach((node) => {
|
Object.keys(allNodes).forEach(function (node) {
|
||||||
if (outgoingEdgeCount[node] === 0) {
|
if (outgoingEdgeCount[node] === 0) {
|
||||||
delete outgoingEdgeCount[node];
|
delete outgoingEdgeCount[node];
|
||||||
S.push(node);
|
S.push(node);
|
||||||
@@ -445,10 +439,10 @@ function topologicalSort(graph) {
|
|||||||
while (S.length > 0) {
|
while (S.length > 0) {
|
||||||
// Ensure the exact same order all the time with the same inputs
|
// Ensure the exact same order all the time with the same inputs
|
||||||
S.sort();
|
S.sort();
|
||||||
const n = S.shift();
|
var n = S.shift();
|
||||||
L.push(n);
|
L.push(n);
|
||||||
const myInverseEdges = inverseEdges[n] || [];
|
var myInverseEdges = inverseEdges[n] || [];
|
||||||
myInverseEdges.forEach((m) => {
|
myInverseEdges.forEach(function (m) {
|
||||||
outgoingEdgeCount[m]--;
|
outgoingEdgeCount[m]--;
|
||||||
if (outgoingEdgeCount[m] === 0) {
|
if (outgoingEdgeCount[m] === 0) {
|
||||||
delete outgoingEdgeCount[m];
|
delete outgoingEdgeCount[m];
|
||||||
|
|||||||
@@ -3,9 +3,9 @@
|
|||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
import * as fs from 'fs';
|
import fs = require('fs');
|
||||||
import * as path from 'path';
|
import path = require('path');
|
||||||
import * as vm from 'vm';
|
import vm = require('vm');
|
||||||
|
|
||||||
interface IPosition {
|
interface IPosition {
|
||||||
line: number;
|
line: number;
|
||||||
@@ -46,7 +46,7 @@ export interface IEntryPoint {
|
|||||||
name: string;
|
name: string;
|
||||||
include?: string[];
|
include?: string[];
|
||||||
exclude?: string[];
|
exclude?: string[];
|
||||||
prepend?: string[];
|
prepend: string[];
|
||||||
append?: string[];
|
append?: string[];
|
||||||
dest?: string;
|
dest?: string;
|
||||||
}
|
}
|
||||||
@@ -64,7 +64,7 @@ interface INodeSet {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export interface IFile {
|
export interface IFile {
|
||||||
path: string | null;
|
path: string;
|
||||||
contents: string;
|
contents: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -97,13 +97,13 @@ export interface ILoaderConfig {
|
|||||||
/**
|
/**
|
||||||
* Bundle `entryPoints` given config `config`.
|
* Bundle `entryPoints` given config `config`.
|
||||||
*/
|
*/
|
||||||
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult | null) => void): void {
|
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
|
||||||
const entryPointsMap: IEntryPointMap = {};
|
let entryPointsMap: IEntryPointMap = {};
|
||||||
entryPoints.forEach((module: IEntryPoint) => {
|
entryPoints.forEach((module: IEntryPoint) => {
|
||||||
entryPointsMap[module.name] = module;
|
entryPointsMap[module.name] = module;
|
||||||
});
|
});
|
||||||
|
|
||||||
const allMentionedModulesMap: { [modules: string]: boolean; } = {};
|
let allMentionedModulesMap: { [modules: string]: boolean; } = {};
|
||||||
entryPoints.forEach((module: IEntryPoint) => {
|
entryPoints.forEach((module: IEntryPoint) => {
|
||||||
allMentionedModulesMap[module.name] = true;
|
allMentionedModulesMap[module.name] = true;
|
||||||
(module.include || []).forEach(function (includedModule) {
|
(module.include || []).forEach(function (includedModule) {
|
||||||
@@ -115,32 +115,28 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
|
|||||||
});
|
});
|
||||||
|
|
||||||
|
|
||||||
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
||||||
const r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
||||||
const loaderModule = { exports: {} };
|
var loaderModule = { exports: {} };
|
||||||
r.call({}, require, loaderModule, loaderModule.exports);
|
r.call({}, require, loaderModule, loaderModule.exports);
|
||||||
|
|
||||||
const loader: any = loaderModule.exports;
|
var loader: any = loaderModule.exports;
|
||||||
config.isBuild = true;
|
config.isBuild = true;
|
||||||
config.paths = config.paths || {};
|
config.paths = config.paths || {};
|
||||||
if (!config.paths['vs/nls']) {
|
config.paths['vs/nls'] = 'out-build/vs/nls.build';
|
||||||
config.paths['vs/nls'] = 'out-build/vs/nls.build';
|
config.paths['vs/css'] = 'out-build/vs/css.build';
|
||||||
}
|
|
||||||
if (!config.paths['vs/css']) {
|
|
||||||
config.paths['vs/css'] = 'out-build/vs/css.build';
|
|
||||||
}
|
|
||||||
loader.config(config);
|
loader.config(config);
|
||||||
|
|
||||||
loader(['require'], (localRequire: any) => {
|
loader(['require'], (localRequire) => {
|
||||||
const resolvePath = (path: string) => {
|
let resolvePath = (path: string) => {
|
||||||
const r = localRequire.toUrl(path);
|
let r = localRequire.toUrl(path);
|
||||||
if (!/\.js/.test(r)) {
|
if (!/\.js/.test(r)) {
|
||||||
return r + '.js';
|
return r + '.js';
|
||||||
}
|
}
|
||||||
return r;
|
return r;
|
||||||
};
|
};
|
||||||
for (const moduleId in entryPointsMap) {
|
for (let moduleId in entryPointsMap) {
|
||||||
const entryPoint = entryPointsMap[moduleId];
|
let entryPoint = entryPointsMap[moduleId];
|
||||||
if (entryPoint.append) {
|
if (entryPoint.append) {
|
||||||
entryPoint.append = entryPoint.append.map(resolvePath);
|
entryPoint.append = entryPoint.append.map(resolvePath);
|
||||||
}
|
}
|
||||||
@@ -151,76 +147,76 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
|
|||||||
});
|
});
|
||||||
|
|
||||||
loader(Object.keys(allMentionedModulesMap), () => {
|
loader(Object.keys(allMentionedModulesMap), () => {
|
||||||
const modules = <IBuildModuleInfo[]>loader.getBuildInfo();
|
let modules = <IBuildModuleInfo[]>loader.getBuildInfo();
|
||||||
const partialResult = emitEntryPoints(modules, entryPointsMap);
|
let partialResult = emitEntryPoints(modules, entryPointsMap);
|
||||||
const cssInlinedResources = loader('vs/css').getInlinedResources();
|
let cssInlinedResources = loader('vs/css').getInlinedResources();
|
||||||
callback(null, {
|
callback(null, {
|
||||||
files: partialResult.files,
|
files: partialResult.files,
|
||||||
cssInlinedResources: cssInlinedResources,
|
cssInlinedResources: cssInlinedResources,
|
||||||
bundleData: partialResult.bundleData
|
bundleData: partialResult.bundleData
|
||||||
});
|
});
|
||||||
}, (err: any) => callback(err, null));
|
}, (err) => callback(err, null));
|
||||||
}
|
}
|
||||||
|
|
||||||
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
|
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
|
||||||
const modulesMap: IBuildModuleInfoMap = {};
|
let modulesMap: IBuildModuleInfoMap = {};
|
||||||
modules.forEach((m: IBuildModuleInfo) => {
|
modules.forEach((m: IBuildModuleInfo) => {
|
||||||
modulesMap[m.id] = m;
|
modulesMap[m.id] = m;
|
||||||
});
|
});
|
||||||
|
|
||||||
const modulesGraph: IGraph = {};
|
let modulesGraph: IGraph = {};
|
||||||
modules.forEach((m: IBuildModuleInfo) => {
|
modules.forEach((m: IBuildModuleInfo) => {
|
||||||
modulesGraph[m.id] = m.dependencies;
|
modulesGraph[m.id] = m.dependencies;
|
||||||
});
|
});
|
||||||
|
|
||||||
const sortedModules = topologicalSort(modulesGraph);
|
let sortedModules = topologicalSort(modulesGraph);
|
||||||
|
|
||||||
let result: IConcatFile[] = [];
|
let result: IConcatFile[] = [];
|
||||||
const usedPlugins: IPluginMap = {};
|
let usedPlugins: IPluginMap = {};
|
||||||
const bundleData: IBundleData = {
|
let bundleData: IBundleData = {
|
||||||
graph: modulesGraph,
|
graph: modulesGraph,
|
||||||
bundles: {}
|
bundles: {}
|
||||||
};
|
};
|
||||||
|
|
||||||
Object.keys(entryPoints).forEach((moduleToBundle: string) => {
|
Object.keys(entryPoints).forEach((moduleToBundle: string) => {
|
||||||
const info = entryPoints[moduleToBundle];
|
let info = entryPoints[moduleToBundle];
|
||||||
const rootNodes = [moduleToBundle].concat(info.include || []);
|
let rootNodes = [moduleToBundle].concat(info.include || []);
|
||||||
const allDependencies = visit(rootNodes, modulesGraph);
|
let allDependencies = visit(rootNodes, modulesGraph);
|
||||||
const excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
|
let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
|
||||||
|
|
||||||
excludes.forEach((excludeRoot: string) => {
|
excludes.forEach((excludeRoot: string) => {
|
||||||
const allExcludes = visit([excludeRoot], modulesGraph);
|
let allExcludes = visit([excludeRoot], modulesGraph);
|
||||||
Object.keys(allExcludes).forEach((exclude: string) => {
|
Object.keys(allExcludes).forEach((exclude: string) => {
|
||||||
delete allDependencies[exclude];
|
delete allDependencies[exclude];
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
const includedModules = sortedModules.filter((module: string) => {
|
let includedModules = sortedModules.filter((module: string) => {
|
||||||
return allDependencies[module];
|
return allDependencies[module];
|
||||||
});
|
});
|
||||||
|
|
||||||
bundleData.bundles[moduleToBundle] = includedModules;
|
bundleData.bundles[moduleToBundle] = includedModules;
|
||||||
|
|
||||||
const res = emitEntryPoint(
|
let res = emitEntryPoint(
|
||||||
modulesMap,
|
modulesMap,
|
||||||
modulesGraph,
|
modulesGraph,
|
||||||
moduleToBundle,
|
moduleToBundle,
|
||||||
includedModules,
|
includedModules,
|
||||||
info.prepend || [],
|
info.prepend,
|
||||||
info.append || [],
|
info.append,
|
||||||
info.dest
|
info.dest
|
||||||
);
|
);
|
||||||
|
|
||||||
result = result.concat(res.files);
|
result = result.concat(res.files);
|
||||||
for (const pluginName in res.usedPlugins) {
|
for (let pluginName in res.usedPlugins) {
|
||||||
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
|
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
Object.keys(usedPlugins).forEach((pluginName: string) => {
|
Object.keys(usedPlugins).forEach((pluginName: string) => {
|
||||||
const plugin = usedPlugins[pluginName];
|
let plugin = usedPlugins[pluginName];
|
||||||
if (typeof plugin.finishBuild === 'function') {
|
if (typeof plugin.finishBuild === 'function') {
|
||||||
const write = (filename: string, contents: string) => {
|
let write = (filename: string, contents: string) => {
|
||||||
result.push({
|
result.push({
|
||||||
dest: filename,
|
dest: filename,
|
||||||
sources: [{
|
sources: [{
|
||||||
@@ -241,16 +237,16 @@ function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMa
|
|||||||
}
|
}
|
||||||
|
|
||||||
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
||||||
const parseDefineCall = (moduleMatch: string, depsMatch: string) => {
|
let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
|
||||||
const module = moduleMatch.replace(/^"|"$/g, '');
|
let module = moduleMatch.replace(/^"|"$/g, '');
|
||||||
let deps = depsMatch.split(',');
|
let deps = depsMatch.split(',');
|
||||||
deps = deps.map((dep) => {
|
deps = deps.map((dep) => {
|
||||||
dep = dep.trim();
|
dep = dep.trim();
|
||||||
dep = dep.replace(/^"|"$/g, '');
|
dep = dep.replace(/^"|"$/g, '');
|
||||||
dep = dep.replace(/^'|'$/g, '');
|
dep = dep.replace(/^'|'$/g, '');
|
||||||
let prefix: string | null = null;
|
let prefix: string = null;
|
||||||
let _path: string | null = null;
|
let _path: string = null;
|
||||||
const pieces = dep.split('!');
|
let pieces = dep.split('!');
|
||||||
if (pieces.length > 1) {
|
if (pieces.length > 1) {
|
||||||
prefix = pieces[0] + '!';
|
prefix = pieces[0] + '!';
|
||||||
_path = pieces[1];
|
_path = pieces[1];
|
||||||
@@ -260,7 +256,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
|||||||
}
|
}
|
||||||
|
|
||||||
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
|
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
|
||||||
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
let res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
||||||
return prefix + res;
|
return prefix + res;
|
||||||
}
|
}
|
||||||
return prefix + _path;
|
return prefix + _path;
|
||||||
@@ -271,7 +267,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
destFiles.forEach((destFile) => {
|
destFiles.forEach((destFile, index) => {
|
||||||
if (!/\.js$/.test(destFile.dest)) {
|
if (!/\.js$/.test(destFile.dest)) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
@@ -280,33 +276,33 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Do one pass to record the usage counts for each module id
|
// Do one pass to record the usage counts for each module id
|
||||||
const useCounts: { [moduleId: string]: number; } = {};
|
let useCounts: { [moduleId: string]: number; } = {};
|
||||||
destFile.sources.forEach((source) => {
|
destFile.sources.forEach((source) => {
|
||||||
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
||||||
if (!matches) {
|
if (!matches) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const defineCall = parseDefineCall(matches[1], matches[2]);
|
let defineCall = parseDefineCall(matches[1], matches[2]);
|
||||||
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
||||||
defineCall.deps.forEach((dep) => {
|
defineCall.deps.forEach((dep) => {
|
||||||
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
const sortedByUseModules = Object.keys(useCounts);
|
let sortedByUseModules = Object.keys(useCounts);
|
||||||
sortedByUseModules.sort((a, b) => {
|
sortedByUseModules.sort((a, b) => {
|
||||||
return useCounts[b] - useCounts[a];
|
return useCounts[b] - useCounts[a];
|
||||||
});
|
});
|
||||||
|
|
||||||
const replacementMap: { [moduleId: string]: number; } = {};
|
let replacementMap: { [moduleId: string]: number; } = {};
|
||||||
sortedByUseModules.forEach((module, index) => {
|
sortedByUseModules.forEach((module, index) => {
|
||||||
replacementMap[module] = index;
|
replacementMap[module] = index;
|
||||||
});
|
});
|
||||||
|
|
||||||
destFile.sources.forEach((source) => {
|
destFile.sources.forEach((source) => {
|
||||||
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
|
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
|
||||||
const defineCall = parseDefineCall(moduleMatch, depsMatch);
|
let defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||||
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
|
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@@ -336,7 +332,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
|||||||
|
|
||||||
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
||||||
// Taken from typescript compiler => emitFiles
|
// Taken from typescript compiler => emitFiles
|
||||||
const BOILERPLATE = [
|
let BOILERPLATE = [
|
||||||
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
||||||
{ start: /^var __assign/, end: /^};$/ },
|
{ start: /^var __assign/, end: /^};$/ },
|
||||||
{ start: /^var __decorate/, end: /^};$/ },
|
{ start: /^var __decorate/, end: /^};$/ },
|
||||||
@@ -347,22 +343,22 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
|||||||
];
|
];
|
||||||
|
|
||||||
destFiles.forEach((destFile) => {
|
destFiles.forEach((destFile) => {
|
||||||
const SEEN_BOILERPLATE: boolean[] = [];
|
let SEEN_BOILERPLATE = [];
|
||||||
destFile.sources.forEach((source) => {
|
destFile.sources.forEach((source) => {
|
||||||
const lines = source.contents.split(/\r\n|\n|\r/);
|
let lines = source.contents.split(/\r\n|\n|\r/);
|
||||||
const newLines: string[] = [];
|
let newLines: string[] = [];
|
||||||
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
|
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
|
||||||
|
|
||||||
for (let i = 0; i < lines.length; i++) {
|
for (let i = 0; i < lines.length; i++) {
|
||||||
const line = lines[i];
|
let line = lines[i];
|
||||||
if (IS_REMOVING_BOILERPLATE) {
|
if (IS_REMOVING_BOILERPLATE) {
|
||||||
newLines.push('');
|
newLines.push('');
|
||||||
if (END_BOILERPLATE!.test(line)) {
|
if (END_BOILERPLATE.test(line)) {
|
||||||
IS_REMOVING_BOILERPLATE = false;
|
IS_REMOVING_BOILERPLATE = false;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
for (let j = 0; j < BOILERPLATE.length; j++) {
|
for (let j = 0; j < BOILERPLATE.length; j++) {
|
||||||
const boilerplate = BOILERPLATE[j];
|
let boilerplate = BOILERPLATE[j];
|
||||||
if (boilerplate.start.test(line)) {
|
if (boilerplate.start.test(line)) {
|
||||||
if (SEEN_BOILERPLATE[j]) {
|
if (SEEN_BOILERPLATE[j]) {
|
||||||
IS_REMOVING_BOILERPLATE = true;
|
IS_REMOVING_BOILERPLATE = true;
|
||||||
@@ -402,19 +398,19 @@ function emitEntryPoint(
|
|||||||
includedModules: string[],
|
includedModules: string[],
|
||||||
prepend: string[],
|
prepend: string[],
|
||||||
append: string[],
|
append: string[],
|
||||||
dest: string | undefined
|
dest: string
|
||||||
): IEmitEntryPointResult {
|
): IEmitEntryPointResult {
|
||||||
if (!dest) {
|
if (!dest) {
|
||||||
dest = entryPoint + '.js';
|
dest = entryPoint + '.js';
|
||||||
}
|
}
|
||||||
const mainResult: IConcatFile = {
|
let mainResult: IConcatFile = {
|
||||||
sources: [],
|
sources: [],
|
||||||
dest: dest
|
dest: dest
|
||||||
},
|
},
|
||||||
results: IConcatFile[] = [mainResult];
|
results: IConcatFile[] = [mainResult];
|
||||||
|
|
||||||
const usedPlugins: IPluginMap = {};
|
let usedPlugins: IPluginMap = {};
|
||||||
const getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
|
let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
|
||||||
if (!usedPlugins[pluginName]) {
|
if (!usedPlugins[pluginName]) {
|
||||||
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
||||||
}
|
}
|
||||||
@@ -422,39 +418,39 @@ function emitEntryPoint(
|
|||||||
};
|
};
|
||||||
|
|
||||||
includedModules.forEach((c: string) => {
|
includedModules.forEach((c: string) => {
|
||||||
const bangIndex = c.indexOf('!');
|
let bangIndex = c.indexOf('!');
|
||||||
|
|
||||||
if (bangIndex >= 0) {
|
if (bangIndex >= 0) {
|
||||||
const pluginName = c.substr(0, bangIndex);
|
let pluginName = c.substr(0, bangIndex);
|
||||||
const plugin = getLoaderPlugin(pluginName);
|
let plugin = getLoaderPlugin(pluginName);
|
||||||
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const module = modulesMap[c];
|
let module = modulesMap[c];
|
||||||
|
|
||||||
if (module.path === 'empty:') {
|
if (module.path === 'empty:') {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const contents = readFileAndRemoveBOM(module.path);
|
let contents = readFileAndRemoveBOM(module.path);
|
||||||
|
|
||||||
if (module.shim) {
|
if (module.shim) {
|
||||||
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
||||||
} else {
|
} else {
|
||||||
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
|
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
Object.keys(usedPlugins).forEach((pluginName: string) => {
|
Object.keys(usedPlugins).forEach((pluginName: string) => {
|
||||||
const plugin = usedPlugins[pluginName];
|
let plugin = usedPlugins[pluginName];
|
||||||
if (typeof plugin.writeFile === 'function') {
|
if (typeof plugin.writeFile === 'function') {
|
||||||
const req: ILoaderPluginReqFunc = <any>(() => {
|
let req: ILoaderPluginReqFunc = <any>(() => {
|
||||||
throw new Error('no-no!');
|
throw new Error('no-no!');
|
||||||
});
|
});
|
||||||
req.toUrl = something => something;
|
req.toUrl = something => something;
|
||||||
|
|
||||||
const write = (filename: string, contents: string) => {
|
let write = (filename: string, contents: string) => {
|
||||||
results.push({
|
results.push({
|
||||||
dest: filename,
|
dest: filename,
|
||||||
sources: [{
|
sources: [{
|
||||||
@@ -467,16 +463,16 @@ function emitEntryPoint(
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
const toIFile = (path: string): IFile => {
|
let toIFile = (path): IFile => {
|
||||||
const contents = readFileAndRemoveBOM(path);
|
let contents = readFileAndRemoveBOM(path);
|
||||||
return {
|
return {
|
||||||
path: path,
|
path: path,
|
||||||
contents: contents
|
contents: contents
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
const toPrepend = (prepend || []).map(toIFile);
|
let toPrepend = (prepend || []).map(toIFile);
|
||||||
const toAppend = (append || []).map(toIFile);
|
let toAppend = (append || []).map(toIFile);
|
||||||
|
|
||||||
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
||||||
|
|
||||||
@@ -487,8 +483,8 @@ function emitEntryPoint(
|
|||||||
}
|
}
|
||||||
|
|
||||||
function readFileAndRemoveBOM(path: string): string {
|
function readFileAndRemoveBOM(path: string): string {
|
||||||
const BOM_CHAR_CODE = 65279;
|
var BOM_CHAR_CODE = 65279;
|
||||||
let contents = fs.readFileSync(path, 'utf8');
|
var contents = fs.readFileSync(path, 'utf8');
|
||||||
// Remove BOM
|
// Remove BOM
|
||||||
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
||||||
contents = contents.substring(1);
|
contents = contents.substring(1);
|
||||||
@@ -499,7 +495,7 @@ function readFileAndRemoveBOM(path: string): string {
|
|||||||
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
|
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
|
||||||
let result = '';
|
let result = '';
|
||||||
if (typeof plugin.write === 'function') {
|
if (typeof plugin.write === 'function') {
|
||||||
const write: ILoaderPluginWriteFunc = <any>((what: string) => {
|
let write: ILoaderPluginWriteFunc = <any>((what) => {
|
||||||
result += what;
|
result += what;
|
||||||
});
|
});
|
||||||
write.getEntryPoint = () => {
|
write.getEntryPoint = () => {
|
||||||
@@ -517,15 +513,15 @@ function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: strin
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path: string, contents: string): IFile {
|
function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
|
||||||
|
|
||||||
// `defineCallPosition` is the position in code: |define()
|
// `defineCallPosition` is the position in code: |define()
|
||||||
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||||
|
|
||||||
// `parensOffset` is the position in code: define|()
|
// `parensOffset` is the position in code: define|()
|
||||||
const parensOffset = contents.indexOf('(', defineCallOffset);
|
let parensOffset = contents.indexOf('(', defineCallOffset);
|
||||||
|
|
||||||
const insertStr = '"' + moduleId + '", ';
|
let insertStr = '"' + moduleId + '", ';
|
||||||
|
|
||||||
return {
|
return {
|
||||||
path: path,
|
path: path,
|
||||||
@@ -534,8 +530,8 @@ function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path:
|
|||||||
}
|
}
|
||||||
|
|
||||||
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
|
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
|
||||||
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||||
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||||
return {
|
return {
|
||||||
path: path,
|
path: path,
|
||||||
contents: contents + '\n;\n' + strDefine
|
contents: contents + '\n;\n' + strDefine
|
||||||
@@ -550,8 +546,8 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
|
|||||||
return desiredCol - 1;
|
return desiredCol - 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
let line = 1;
|
let line = 1,
|
||||||
let lastNewLineOffset = -1;
|
lastNewLineOffset = -1;
|
||||||
|
|
||||||
do {
|
do {
|
||||||
if (desiredLine === line) {
|
if (desiredLine === line) {
|
||||||
@@ -569,16 +565,16 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
|
|||||||
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
||||||
*/
|
*/
|
||||||
function visit(rootNodes: string[], graph: IGraph): INodeSet {
|
function visit(rootNodes: string[], graph: IGraph): INodeSet {
|
||||||
const result: INodeSet = {};
|
let result: INodeSet = {},
|
||||||
const queue = rootNodes;
|
queue = rootNodes;
|
||||||
|
|
||||||
rootNodes.forEach((node) => {
|
rootNodes.forEach((node) => {
|
||||||
result[node] = true;
|
result[node] = true;
|
||||||
});
|
});
|
||||||
|
|
||||||
while (queue.length > 0) {
|
while (queue.length > 0) {
|
||||||
const el = queue.shift();
|
let el = queue.shift();
|
||||||
const myEdges = graph[el!] || [];
|
let myEdges = graph[el] || [];
|
||||||
myEdges.forEach((toNode) => {
|
myEdges.forEach((toNode) => {
|
||||||
if (!result[toNode]) {
|
if (!result[toNode]) {
|
||||||
result[toNode] = true;
|
result[toNode] = true;
|
||||||
@@ -595,7 +591,7 @@ function visit(rootNodes: string[], graph: IGraph): INodeSet {
|
|||||||
*/
|
*/
|
||||||
function topologicalSort(graph: IGraph): string[] {
|
function topologicalSort(graph: IGraph): string[] {
|
||||||
|
|
||||||
const allNodes: INodeSet = {},
|
let allNodes: INodeSet = {},
|
||||||
outgoingEdgeCount: { [node: string]: number; } = {},
|
outgoingEdgeCount: { [node: string]: number; } = {},
|
||||||
inverseEdges: IGraph = {};
|
inverseEdges: IGraph = {};
|
||||||
|
|
||||||
@@ -613,7 +609,7 @@ function topologicalSort(graph: IGraph): string[] {
|
|||||||
});
|
});
|
||||||
|
|
||||||
// https://en.wikipedia.org/wiki/Topological_sorting
|
// https://en.wikipedia.org/wiki/Topological_sorting
|
||||||
const S: string[] = [],
|
let S: string[] = [],
|
||||||
L: string[] = [];
|
L: string[] = [];
|
||||||
|
|
||||||
Object.keys(allNodes).forEach((node: string) => {
|
Object.keys(allNodes).forEach((node: string) => {
|
||||||
@@ -627,10 +623,10 @@ function topologicalSort(graph: IGraph): string[] {
|
|||||||
// Ensure the exact same order all the time with the same inputs
|
// Ensure the exact same order all the time with the same inputs
|
||||||
S.sort();
|
S.sort();
|
||||||
|
|
||||||
const n: string = S.shift()!;
|
let n: string = S.shift();
|
||||||
L.push(n);
|
L.push(n);
|
||||||
|
|
||||||
const myInverseEdges = inverseEdges[n] || [];
|
let myInverseEdges = inverseEdges[n] || [];
|
||||||
myInverseEdges.forEach((m: string) => {
|
myInverseEdges.forEach((m: string) => {
|
||||||
outgoingEdgeCount[m]--;
|
outgoingEdgeCount[m]--;
|
||||||
if (outgoingEdgeCount[m] === 0) {
|
if (outgoingEdgeCount[m] === 0) {
|
||||||
|
|||||||
@@ -4,54 +4,44 @@
|
|||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
'use strict';
|
'use strict';
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const es = require("event-stream");
|
var gulp = require("gulp");
|
||||||
const fs = require("fs");
|
var tsb = require("gulp-tsb");
|
||||||
const gulp = require("gulp");
|
var es = require("event-stream");
|
||||||
const bom = require("gulp-bom");
|
var watch = require('./watch');
|
||||||
const sourcemaps = require("gulp-sourcemaps");
|
var nls = require("./nls");
|
||||||
const tsb = require("gulp-tsb");
|
var util = require("./util");
|
||||||
const path = require("path");
|
var reporter_1 = require("./reporter");
|
||||||
const _ = require("underscore");
|
var path = require("path");
|
||||||
const monacodts = require("../monaco/api");
|
var bom = require("gulp-bom");
|
||||||
const nls = require("./nls");
|
var sourcemaps = require("gulp-sourcemaps");
|
||||||
const reporter_1 = require("./reporter");
|
var _ = require("underscore");
|
||||||
const util = require("./util");
|
var monacodts = require("../monaco/api");
|
||||||
const fancyLog = require("fancy-log");
|
var fs = require("fs");
|
||||||
const ansiColors = require("ansi-colors");
|
var reporter = reporter_1.createReporter();
|
||||||
const watch = require('./watch');
|
|
||||||
const reporter = reporter_1.createReporter();
|
|
||||||
function getTypeScriptCompilerOptions(src) {
|
function getTypeScriptCompilerOptions(src) {
|
||||||
const rootDir = path.join(__dirname, `../../${src}`);
|
var rootDir = path.join(__dirname, "../../" + src);
|
||||||
const tsconfig = require(`../../${src}/tsconfig.json`);
|
var options = require("../../" + src + "/tsconfig.json").compilerOptions;
|
||||||
let options;
|
|
||||||
if (tsconfig.extends) {
|
|
||||||
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
options = tsconfig.compilerOptions;
|
|
||||||
}
|
|
||||||
options.verbose = false;
|
options.verbose = false;
|
||||||
options.sourceMap = true;
|
options.sourceMap = true;
|
||||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||||
options.sourceMap = false;
|
options.sourceMap = false;
|
||||||
}
|
}
|
||||||
options.rootDir = rootDir;
|
options.rootDir = rootDir;
|
||||||
options.baseUrl = rootDir;
|
|
||||||
options.sourceRoot = util.toFileUri(rootDir);
|
options.sourceRoot = util.toFileUri(rootDir);
|
||||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||||
return options;
|
return options;
|
||||||
}
|
}
|
||||||
function createCompile(src, build, emitError) {
|
function createCompile(src, build, emitError) {
|
||||||
const opts = _.clone(getTypeScriptCompilerOptions(src));
|
var opts = _.clone(getTypeScriptCompilerOptions(src));
|
||||||
opts.inlineSources = !!build;
|
opts.inlineSources = !!build;
|
||||||
opts.noFilesystemLookup = true;
|
opts.noFilesystemLookup = true;
|
||||||
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
|
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
|
||||||
return function (token) {
|
return function (token) {
|
||||||
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
|
||||||
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
|
||||||
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
|
var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
|
||||||
const input = es.through();
|
var input = es.through();
|
||||||
const output = input
|
var output = input
|
||||||
.pipe(utf8Filter)
|
.pipe(utf8Filter)
|
||||||
.pipe(bom())
|
.pipe(bom())
|
||||||
.pipe(utf8Filter.restore)
|
.pipe(utf8Filter.restore)
|
||||||
@@ -67,136 +57,91 @@ function createCompile(src, build, emitError) {
|
|||||||
sourceRoot: opts.sourceRoot
|
sourceRoot: opts.sourceRoot
|
||||||
}))
|
}))
|
||||||
.pipe(tsFilter.restore)
|
.pipe(tsFilter.restore)
|
||||||
.pipe(reporter.end(!!emitError));
|
.pipe(reporter.end(emitError));
|
||||||
return es.duplex(input, output);
|
return es.duplex(input, output);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
const typesDts = [
|
|
||||||
'node_modules/typescript/lib/*.d.ts',
|
|
||||||
'node_modules/@types/**/*.d.ts',
|
|
||||||
'!node_modules/@types/webpack/**/*',
|
|
||||||
'!node_modules/@types/uglify-js/**/*',
|
|
||||||
];
|
|
||||||
function compileTask(src, out, build) {
|
function compileTask(src, out, build) {
|
||||||
return function () {
|
return function () {
|
||||||
const compile = createCompile(src, build, true);
|
var compile = createCompile(src, build, true);
|
||||||
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
|
var srcPipe = es.merge(gulp.src(src + "/**", { base: "" + src }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||||
let generator = new MonacoGenerator(false);
|
// Do not write .d.ts files to disk, as they are not needed there.
|
||||||
if (src === 'src') {
|
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
|
||||||
generator.execute();
|
|
||||||
}
|
|
||||||
return srcPipe
|
return srcPipe
|
||||||
.pipe(generator.stream)
|
|
||||||
.pipe(compile())
|
.pipe(compile())
|
||||||
.pipe(gulp.dest(out));
|
.pipe(dtsFilter)
|
||||||
|
.pipe(gulp.dest(out))
|
||||||
|
.pipe(dtsFilter.restore)
|
||||||
|
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
exports.compileTask = compileTask;
|
exports.compileTask = compileTask;
|
||||||
function watchTask(out, build) {
|
function watchTask(out, build) {
|
||||||
return function () {
|
return function () {
|
||||||
const compile = createCompile('src', build);
|
var compile = createCompile('src', build);
|
||||||
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
|
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||||
const watchSrc = watch('src/**', { base: 'src' });
|
var watchSrc = watch('src/**', { base: 'src' });
|
||||||
let generator = new MonacoGenerator(true);
|
// Do not write .d.ts files to disk, as they are not needed there.
|
||||||
generator.execute();
|
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
|
||||||
return watchSrc
|
return watchSrc
|
||||||
.pipe(generator.stream)
|
|
||||||
.pipe(util.incremental(compile, src, true))
|
.pipe(util.incremental(compile, src, true))
|
||||||
.pipe(gulp.dest(out));
|
.pipe(dtsFilter)
|
||||||
|
.pipe(gulp.dest(out))
|
||||||
|
.pipe(dtsFilter.restore)
|
||||||
|
.pipe(monacodtsTask(out, true));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
exports.watchTask = watchTask;
|
exports.watchTask = watchTask;
|
||||||
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
|
function monacodtsTask(out, isWatch) {
|
||||||
class MonacoGenerator {
|
var basePath = path.resolve(process.cwd(), out);
|
||||||
constructor(isWatch) {
|
var neededFiles = {};
|
||||||
this._executeSoonTimer = null;
|
monacodts.getFilesToWatch(out).forEach(function (filePath) {
|
||||||
this._isWatch = isWatch;
|
filePath = path.normalize(filePath);
|
||||||
this.stream = es.through();
|
neededFiles[filePath] = true;
|
||||||
this._watchers = [];
|
});
|
||||||
this._watchedFiles = {};
|
var inputFiles = {};
|
||||||
let onWillReadFile = (moduleId, filePath) => {
|
for (var filePath in neededFiles) {
|
||||||
if (!this._isWatch) {
|
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
|
||||||
return;
|
// This file is needed from source => simply read it now
|
||||||
}
|
inputFiles[filePath] = fs.readFileSync(filePath).toString();
|
||||||
if (this._watchedFiles[filePath]) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
this._watchedFiles[filePath] = true;
|
|
||||||
const watcher = fs.watch(filePath);
|
|
||||||
watcher.addListener('change', () => {
|
|
||||||
this._declarationResolver.invalidateCache(moduleId);
|
|
||||||
this._executeSoon();
|
|
||||||
});
|
|
||||||
watcher.addListener('error', (err) => {
|
|
||||||
console.error(`Encountered error while watching ${filePath}.`);
|
|
||||||
console.log(err);
|
|
||||||
delete this._watchedFiles[filePath];
|
|
||||||
for (let i = 0; i < this._watchers.length; i++) {
|
|
||||||
if (this._watchers[i] === watcher) {
|
|
||||||
this._watchers.splice(i, 1);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
watcher.close();
|
|
||||||
this._declarationResolver.invalidateCache(moduleId);
|
|
||||||
this._executeSoon();
|
|
||||||
});
|
|
||||||
this._watchers.push(watcher);
|
|
||||||
};
|
|
||||||
this._fsProvider = new class extends monacodts.FSProvider {
|
|
||||||
readFileSync(moduleId, filePath) {
|
|
||||||
onWillReadFile(moduleId, filePath);
|
|
||||||
return super.readFileSync(moduleId, filePath);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
|
|
||||||
if (this._isWatch) {
|
|
||||||
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
|
|
||||||
recipeWatcher.addListener('change', () => {
|
|
||||||
this._executeSoon();
|
|
||||||
});
|
|
||||||
this._watchers.push(recipeWatcher);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
_executeSoon() {
|
var setInputFile = function (filePath, contents) {
|
||||||
if (this._executeSoonTimer !== null) {
|
if (inputFiles[filePath] === contents) {
|
||||||
clearTimeout(this._executeSoonTimer);
|
// no change
|
||||||
this._executeSoonTimer = null;
|
|
||||||
}
|
|
||||||
this._executeSoonTimer = setTimeout(() => {
|
|
||||||
this._executeSoonTimer = null;
|
|
||||||
this.execute();
|
|
||||||
}, 20);
|
|
||||||
}
|
|
||||||
dispose() {
|
|
||||||
this._watchers.forEach(watcher => watcher.close());
|
|
||||||
}
|
|
||||||
_run() {
|
|
||||||
let r = monacodts.run3(this._declarationResolver);
|
|
||||||
if (!r && !this._isWatch) {
|
|
||||||
// The build must always be able to generate the monaco.d.ts
|
|
||||||
throw new Error(`monaco.d.ts generation error - Cannot continue`);
|
|
||||||
}
|
|
||||||
return r;
|
|
||||||
}
|
|
||||||
_log(message, ...rest) {
|
|
||||||
fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
|
|
||||||
}
|
|
||||||
execute() {
|
|
||||||
const startTime = Date.now();
|
|
||||||
const result = this._run();
|
|
||||||
if (!result) {
|
|
||||||
// nothing really changed
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (result.isTheSame) {
|
inputFiles[filePath] = contents;
|
||||||
return;
|
var neededInputFilesCount = Object.keys(neededFiles).length;
|
||||||
|
var availableInputFilesCount = Object.keys(inputFiles).length;
|
||||||
|
if (neededInputFilesCount === availableInputFilesCount) {
|
||||||
|
run();
|
||||||
}
|
}
|
||||||
fs.writeFileSync(result.filePath, result.content);
|
};
|
||||||
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
|
var run = function () {
|
||||||
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
|
var result = monacodts.run(out, inputFiles);
|
||||||
if (!this._isWatch) {
|
if (!result.isTheSame) {
|
||||||
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
if (isWatch) {
|
||||||
|
fs.writeFileSync(result.filePath, result.content);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
fs.writeFileSync(result.filePath, result.content);
|
||||||
|
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
var resultStream;
|
||||||
|
if (isWatch) {
|
||||||
|
watch('build/monaco/*').pipe(es.through(function () {
|
||||||
|
run();
|
||||||
|
}));
|
||||||
}
|
}
|
||||||
|
resultStream = es.through(function (data) {
|
||||||
|
var filePath = path.normalize(path.resolve(basePath, data.relative));
|
||||||
|
if (neededFiles[filePath]) {
|
||||||
|
setInputFile(filePath, data.contents.toString());
|
||||||
|
}
|
||||||
|
this.emit('data', data);
|
||||||
|
});
|
||||||
|
return resultStream;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,41 +5,31 @@
|
|||||||
|
|
||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
import * as es from 'event-stream';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as gulp from 'gulp';
|
import * as gulp from 'gulp';
|
||||||
|
import * as tsb from 'gulp-tsb';
|
||||||
|
import * as es from 'event-stream';
|
||||||
|
const watch = require('./watch');
|
||||||
|
import * as nls from './nls';
|
||||||
|
import * as util from './util';
|
||||||
|
import { createReporter } from './reporter';
|
||||||
|
import * as path from 'path';
|
||||||
import * as bom from 'gulp-bom';
|
import * as bom from 'gulp-bom';
|
||||||
import * as sourcemaps from 'gulp-sourcemaps';
|
import * as sourcemaps from 'gulp-sourcemaps';
|
||||||
import * as tsb from 'gulp-tsb';
|
|
||||||
import * as path from 'path';
|
|
||||||
import * as _ from 'underscore';
|
import * as _ from 'underscore';
|
||||||
import * as monacodts from '../monaco/api';
|
import * as monacodts from '../monaco/api';
|
||||||
import * as nls from './nls';
|
import * as fs from 'fs';
|
||||||
import { createReporter } from './reporter';
|
|
||||||
import * as util from './util';
|
|
||||||
import * as fancyLog from 'fancy-log';
|
|
||||||
import * as ansiColors from 'ansi-colors';
|
|
||||||
|
|
||||||
const watch = require('./watch');
|
|
||||||
|
|
||||||
const reporter = createReporter();
|
const reporter = createReporter();
|
||||||
|
|
||||||
function getTypeScriptCompilerOptions(src: string) {
|
function getTypeScriptCompilerOptions(src: string) {
|
||||||
const rootDir = path.join(__dirname, `../../${src}`);
|
const rootDir = path.join(__dirname, `../../${src}`);
|
||||||
const tsconfig = require(`../../${src}/tsconfig.json`);
|
const options = require(`../../${src}/tsconfig.json`).compilerOptions;
|
||||||
let options: { [key: string]: any };
|
|
||||||
if (tsconfig.extends) {
|
|
||||||
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
|
|
||||||
} else {
|
|
||||||
options = tsconfig.compilerOptions;
|
|
||||||
}
|
|
||||||
options.verbose = false;
|
options.verbose = false;
|
||||||
options.sourceMap = true;
|
options.sourceMap = true;
|
||||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||||
options.sourceMap = false;
|
options.sourceMap = false;
|
||||||
}
|
}
|
||||||
options.rootDir = rootDir;
|
options.rootDir = rootDir;
|
||||||
options.baseUrl = rootDir;
|
|
||||||
options.sourceRoot = util.toFileUri(rootDir);
|
options.sourceRoot = util.toFileUri(rootDir);
|
||||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||||
return options;
|
return options;
|
||||||
@@ -50,7 +40,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
|
|||||||
opts.inlineSources = !!build;
|
opts.inlineSources = !!build;
|
||||||
opts.noFilesystemLookup = true;
|
opts.noFilesystemLookup = true;
|
||||||
|
|
||||||
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
|
const ts = tsb.create(opts, null, null, err => reporter(err.toString()));
|
||||||
|
|
||||||
return function (token?: util.ICancellationToken) {
|
return function (token?: util.ICancellationToken) {
|
||||||
|
|
||||||
@@ -75,19 +65,12 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
|
|||||||
sourceRoot: opts.sourceRoot
|
sourceRoot: opts.sourceRoot
|
||||||
}))
|
}))
|
||||||
.pipe(tsFilter.restore)
|
.pipe(tsFilter.restore)
|
||||||
.pipe(reporter.end(!!emitError));
|
.pipe(reporter.end(emitError));
|
||||||
|
|
||||||
return es.duplex(input, output);
|
return es.duplex(input, output);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const typesDts = [
|
|
||||||
'node_modules/typescript/lib/*.d.ts',
|
|
||||||
'node_modules/@types/**/*.d.ts',
|
|
||||||
'!node_modules/@types/webpack/**/*',
|
|
||||||
'!node_modules/@types/uglify-js/**/*',
|
|
||||||
];
|
|
||||||
|
|
||||||
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
||||||
|
|
||||||
return function () {
|
return function () {
|
||||||
@@ -95,18 +78,18 @@ export function compileTask(src: string, out: string, build: boolean): () => Nod
|
|||||||
|
|
||||||
const srcPipe = es.merge(
|
const srcPipe = es.merge(
|
||||||
gulp.src(`${src}/**`, { base: `${src}` }),
|
gulp.src(`${src}/**`, { base: `${src}` }),
|
||||||
gulp.src(typesDts),
|
gulp.src('node_modules/typescript/lib/lib.d.ts'),
|
||||||
);
|
);
|
||||||
|
|
||||||
let generator = new MonacoGenerator(false);
|
// Do not write .d.ts files to disk, as they are not needed there.
|
||||||
if (src === 'src') {
|
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
|
||||||
generator.execute();
|
|
||||||
}
|
|
||||||
|
|
||||||
return srcPipe
|
return srcPipe
|
||||||
.pipe(generator.stream)
|
|
||||||
.pipe(compile())
|
.pipe(compile())
|
||||||
.pipe(gulp.dest(out));
|
.pipe(dtsFilter)
|
||||||
|
.pipe(gulp.dest(out))
|
||||||
|
.pipe(dtsFilter.restore)
|
||||||
|
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -117,128 +100,80 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
|
|||||||
|
|
||||||
const src = es.merge(
|
const src = es.merge(
|
||||||
gulp.src('src/**', { base: 'src' }),
|
gulp.src('src/**', { base: 'src' }),
|
||||||
gulp.src(typesDts),
|
gulp.src('node_modules/typescript/lib/lib.d.ts'),
|
||||||
);
|
);
|
||||||
const watchSrc = watch('src/**', { base: 'src' });
|
const watchSrc = watch('src/**', { base: 'src' });
|
||||||
|
|
||||||
let generator = new MonacoGenerator(true);
|
// Do not write .d.ts files to disk, as they are not needed there.
|
||||||
generator.execute();
|
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
|
||||||
|
|
||||||
return watchSrc
|
return watchSrc
|
||||||
.pipe(generator.stream)
|
|
||||||
.pipe(util.incremental(compile, src, true))
|
.pipe(util.incremental(compile, src, true))
|
||||||
.pipe(gulp.dest(out));
|
.pipe(dtsFilter)
|
||||||
|
.pipe(gulp.dest(out))
|
||||||
|
.pipe(dtsFilter.restore)
|
||||||
|
.pipe(monacodtsTask(out, true));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
|
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
|
||||||
|
|
||||||
class MonacoGenerator {
|
const basePath = path.resolve(process.cwd(), out);
|
||||||
private readonly _isWatch: boolean;
|
|
||||||
public readonly stream: NodeJS.ReadWriteStream;
|
|
||||||
|
|
||||||
private readonly _watchers: fs.FSWatcher[];
|
const neededFiles: { [file: string]: boolean; } = {};
|
||||||
private readonly _watchedFiles: { [filePath: string]: boolean; };
|
monacodts.getFilesToWatch(out).forEach(function (filePath) {
|
||||||
private readonly _fsProvider: monacodts.FSProvider;
|
filePath = path.normalize(filePath);
|
||||||
private readonly _declarationResolver: monacodts.DeclarationResolver;
|
neededFiles[filePath] = true;
|
||||||
|
});
|
||||||
|
|
||||||
constructor(isWatch: boolean) {
|
const inputFiles: { [file: string]: string; } = {};
|
||||||
this._isWatch = isWatch;
|
for (let filePath in neededFiles) {
|
||||||
this.stream = es.through();
|
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
|
||||||
this._watchers = [];
|
// This file is needed from source => simply read it now
|
||||||
this._watchedFiles = {};
|
inputFiles[filePath] = fs.readFileSync(filePath).toString();
|
||||||
let onWillReadFile = (moduleId: string, filePath: string) => {
|
|
||||||
if (!this._isWatch) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (this._watchedFiles[filePath]) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
this._watchedFiles[filePath] = true;
|
|
||||||
|
|
||||||
const watcher = fs.watch(filePath);
|
|
||||||
watcher.addListener('change', () => {
|
|
||||||
this._declarationResolver.invalidateCache(moduleId);
|
|
||||||
this._executeSoon();
|
|
||||||
});
|
|
||||||
watcher.addListener('error', (err) => {
|
|
||||||
console.error(`Encountered error while watching ${filePath}.`);
|
|
||||||
console.log(err);
|
|
||||||
delete this._watchedFiles[filePath];
|
|
||||||
for (let i = 0; i < this._watchers.length; i++) {
|
|
||||||
if (this._watchers[i] === watcher) {
|
|
||||||
this._watchers.splice(i, 1);
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
watcher.close();
|
|
||||||
this._declarationResolver.invalidateCache(moduleId);
|
|
||||||
this._executeSoon();
|
|
||||||
});
|
|
||||||
this._watchers.push(watcher);
|
|
||||||
};
|
|
||||||
this._fsProvider = new class extends monacodts.FSProvider {
|
|
||||||
public readFileSync(moduleId: string, filePath: string): Buffer {
|
|
||||||
onWillReadFile(moduleId, filePath);
|
|
||||||
return super.readFileSync(moduleId, filePath);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
|
|
||||||
|
|
||||||
if (this._isWatch) {
|
|
||||||
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
|
|
||||||
recipeWatcher.addListener('change', () => {
|
|
||||||
this._executeSoon();
|
|
||||||
});
|
|
||||||
this._watchers.push(recipeWatcher);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private _executeSoonTimer: NodeJS.Timer | null = null;
|
const setInputFile = (filePath: string, contents: string) => {
|
||||||
private _executeSoon(): void {
|
if (inputFiles[filePath] === contents) {
|
||||||
if (this._executeSoonTimer !== null) {
|
// no change
|
||||||
clearTimeout(this._executeSoonTimer);
|
|
||||||
this._executeSoonTimer = null;
|
|
||||||
}
|
|
||||||
this._executeSoonTimer = setTimeout(() => {
|
|
||||||
this._executeSoonTimer = null;
|
|
||||||
this.execute();
|
|
||||||
}, 20);
|
|
||||||
}
|
|
||||||
|
|
||||||
public dispose(): void {
|
|
||||||
this._watchers.forEach(watcher => watcher.close());
|
|
||||||
}
|
|
||||||
|
|
||||||
private _run(): monacodts.IMonacoDeclarationResult | null {
|
|
||||||
let r = monacodts.run3(this._declarationResolver);
|
|
||||||
if (!r && !this._isWatch) {
|
|
||||||
// The build must always be able to generate the monaco.d.ts
|
|
||||||
throw new Error(`monaco.d.ts generation error - Cannot continue`);
|
|
||||||
}
|
|
||||||
return r;
|
|
||||||
}
|
|
||||||
|
|
||||||
private _log(message: any, ...rest: any[]): void {
|
|
||||||
fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
|
|
||||||
}
|
|
||||||
|
|
||||||
public execute(): void {
|
|
||||||
const startTime = Date.now();
|
|
||||||
const result = this._run();
|
|
||||||
if (!result) {
|
|
||||||
// nothing really changed
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
if (result.isTheSame) {
|
inputFiles[filePath] = contents;
|
||||||
return;
|
const neededInputFilesCount = Object.keys(neededFiles).length;
|
||||||
|
const availableInputFilesCount = Object.keys(inputFiles).length;
|
||||||
|
if (neededInputFilesCount === availableInputFilesCount) {
|
||||||
|
run();
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
fs.writeFileSync(result.filePath, result.content);
|
const run = () => {
|
||||||
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
|
const result = monacodts.run(out, inputFiles);
|
||||||
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
|
if (!result.isTheSame) {
|
||||||
if (!this._isWatch) {
|
if (isWatch) {
|
||||||
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
fs.writeFileSync(result.filePath, result.content);
|
||||||
|
} else {
|
||||||
|
fs.writeFileSync(result.filePath, result.content);
|
||||||
|
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let resultStream: NodeJS.ReadWriteStream;
|
||||||
|
|
||||||
|
if (isWatch) {
|
||||||
|
watch('build/monaco/*').pipe(es.through(function () {
|
||||||
|
run();
|
||||||
|
}));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
resultStream = es.through(function (data) {
|
||||||
|
const filePath = path.normalize(path.resolve(basePath, data.relative));
|
||||||
|
if (neededFiles[filePath]) {
|
||||||
|
setInputFile(filePath, data.contents.toString());
|
||||||
|
}
|
||||||
|
this.emit('data', data);
|
||||||
|
});
|
||||||
|
|
||||||
|
return resultStream;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ const root = path.dirname(path.dirname(__dirname));
|
|||||||
|
|
||||||
function getElectronVersion() {
|
function getElectronVersion() {
|
||||||
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
|
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
|
||||||
// @ts-ignore
|
|
||||||
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
|
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
|
||||||
|
|
||||||
return target;
|
return target;
|
||||||
@@ -20,7 +19,6 @@ function getElectronVersion() {
|
|||||||
module.exports.getElectronVersion = getElectronVersion;
|
module.exports.getElectronVersion = getElectronVersion;
|
||||||
|
|
||||||
// returns 0 if the right version of electron is in .build/electron
|
// returns 0 if the right version of electron is in .build/electron
|
||||||
// @ts-ignore
|
|
||||||
if (require.main === module) {
|
if (require.main === module) {
|
||||||
const version = getElectronVersion();
|
const version = getElectronVersion();
|
||||||
const versionFile = path.join(root, '.build', 'electron', 'version');
|
const versionFile = path.join(root, '.build', 'electron', 'version');
|
||||||
|
|||||||
@@ -4,330 +4,116 @@
|
|||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const es = require("event-stream");
|
var es = require("event-stream");
|
||||||
const fs = require("fs");
|
var assign = require("object-assign");
|
||||||
const glob = require("glob");
|
var remote = require("gulp-remote-src");
|
||||||
const gulp = require("gulp");
|
var flatmap = require('gulp-flatmap');
|
||||||
const path = require("path");
|
var vzip = require('gulp-vinyl-zip');
|
||||||
const File = require("vinyl");
|
var filter = require('gulp-filter');
|
||||||
const vsce = require("vsce");
|
var rename = require('gulp-rename');
|
||||||
const stats_1 = require("./stats");
|
var util = require('gulp-util');
|
||||||
const util2 = require("./util");
|
var buffer = require('gulp-buffer');
|
||||||
const remote = require("gulp-remote-src");
|
var json = require('gulp-json-editor');
|
||||||
const vzip = require('gulp-vinyl-zip');
|
var fs = require("fs");
|
||||||
const filter = require("gulp-filter");
|
var path = require("path");
|
||||||
const rename = require("gulp-rename");
|
var vsce = require("vsce");
|
||||||
const fancyLog = require("fancy-log");
|
var File = require("vinyl");
|
||||||
const ansiColors = require("ansi-colors");
|
function fromLocal(extensionPath) {
|
||||||
const buffer = require('gulp-buffer');
|
var result = es.through();
|
||||||
const json = require("gulp-json-editor");
|
|
||||||
const webpack = require('webpack');
|
|
||||||
const webpackGulp = require('webpack-stream');
|
|
||||||
const root = path.resolve(path.join(__dirname, '..', '..'));
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const _ = require("underscore");
|
|
||||||
const vfs = require("vinyl-fs");
|
|
||||||
const deps = require('../dependencies');
|
|
||||||
const extensionsRoot = path.join(root, 'extensions');
|
|
||||||
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
|
|
||||||
function packageBuiltInExtensions() {
|
|
||||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
|
||||||
const visxDirectory = path.join(path.dirname(root), 'vsix');
|
|
||||||
try {
|
|
||||||
if (!fs.existsSync(visxDirectory)) {
|
|
||||||
fs.mkdirSync(visxDirectory);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
// don't fail the build if the output directory already exists
|
|
||||||
console.warn(err);
|
|
||||||
}
|
|
||||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
|
||||||
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
|
|
||||||
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
|
||||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
|
||||||
vsce.createVSIX({
|
|
||||||
cwd: element.path,
|
|
||||||
packagePath: packagePath,
|
|
||||||
useYarn: true
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
exports.packageBuiltInExtensions = packageBuiltInExtensions;
|
|
||||||
function packageExtensionTask(extensionName, platform, arch) {
|
|
||||||
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
|
||||||
if (platform === 'darwin') {
|
|
||||||
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
|
|
||||||
}
|
|
||||||
platform = platform || process.platform;
|
|
||||||
return () => {
|
|
||||||
const root = path.resolve(path.join(__dirname, '../..'));
|
|
||||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => extensionName === name);
|
|
||||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
|
||||||
return fromLocal(extension.path);
|
|
||||||
}));
|
|
||||||
let result = localExtensions
|
|
||||||
.pipe(util2.skipDirectories())
|
|
||||||
.pipe(util2.fixWin32DirectoryPermissions())
|
|
||||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
|
||||||
return result.pipe(vfs.dest(destination));
|
|
||||||
};
|
|
||||||
}
|
|
||||||
exports.packageExtensionTask = packageExtensionTask;
|
|
||||||
// {{SQL CARBON EDIT}} - End
|
|
||||||
function fromLocal(extensionPath, sourceMappingURLBase) {
|
|
||||||
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
|
|
||||||
if (fs.existsSync(webpackFilename)) {
|
|
||||||
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
return fromLocalNormal(extensionPath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
|
|
||||||
const result = es.through();
|
|
||||||
const packagedDependencies = [];
|
|
||||||
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
|
|
||||||
if (packageJsonConfig.dependencies) {
|
|
||||||
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
|
|
||||||
for (const key in webpackRootConfig.externals) {
|
|
||||||
if (key in packageJsonConfig.dependencies) {
|
|
||||||
packagedDependencies.push(key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
|
||||||
const files = fileNames
|
|
||||||
.map(fileName => path.join(extensionPath, fileName))
|
|
||||||
.map(filePath => new File({
|
|
||||||
path: filePath,
|
|
||||||
stat: fs.statSync(filePath),
|
|
||||||
base: extensionPath,
|
|
||||||
contents: fs.createReadStream(filePath)
|
|
||||||
}));
|
|
||||||
const filesStream = es.readArray(files);
|
|
||||||
// check for a webpack configuration files, then invoke webpack
|
|
||||||
// and merge its output with the files stream. also rewrite the package.json
|
|
||||||
// file to a new entry point
|
|
||||||
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
|
|
||||||
const packageJsonFilter = filter(f => {
|
|
||||||
if (path.basename(f.path) === 'package.json') {
|
|
||||||
// only modify package.json's next to the webpack file.
|
|
||||||
// to be safe, use existsSync instead of path comparison.
|
|
||||||
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}, { restore: true });
|
|
||||||
const patchFilesStream = filesStream
|
|
||||||
.pipe(packageJsonFilter)
|
|
||||||
.pipe(buffer())
|
|
||||||
.pipe(json((data) => {
|
|
||||||
if (data.main) {
|
|
||||||
// hardcoded entry point directory!
|
|
||||||
data.main = data.main.replace('/out/', /dist/);
|
|
||||||
}
|
|
||||||
return data;
|
|
||||||
}))
|
|
||||||
.pipe(packageJsonFilter.restore);
|
|
||||||
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
|
|
||||||
const webpackDone = (err, stats) => {
|
|
||||||
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
|
|
||||||
if (err) {
|
|
||||||
result.emit('error', err);
|
|
||||||
}
|
|
||||||
const { compilation } = stats;
|
|
||||||
if (compilation.errors.length > 0) {
|
|
||||||
result.emit('error', compilation.errors.join('\n'));
|
|
||||||
}
|
|
||||||
if (compilation.warnings.length > 0) {
|
|
||||||
result.emit('error', compilation.warnings.join('\n'));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
|
|
||||||
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
|
||||||
return webpackGulp(webpackConfig, webpack, webpackDone)
|
|
||||||
.pipe(es.through(function (data) {
|
|
||||||
data.stat = data.stat || {};
|
|
||||||
data.base = extensionPath;
|
|
||||||
this.emit('data', data);
|
|
||||||
}))
|
|
||||||
.pipe(es.through(function (data) {
|
|
||||||
// source map handling:
|
|
||||||
// * rewrite sourceMappingURL
|
|
||||||
// * save to disk so that upload-task picks this up
|
|
||||||
if (sourceMappingURLBase) {
|
|
||||||
const contents = data.contents.toString('utf8');
|
|
||||||
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
|
|
||||||
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
|
|
||||||
}), 'utf8');
|
|
||||||
if (/\.js\.map$/.test(data.path)) {
|
|
||||||
if (!fs.existsSync(path.dirname(data.path))) {
|
|
||||||
fs.mkdirSync(path.dirname(data.path));
|
|
||||||
}
|
|
||||||
fs.writeFileSync(data.path, data.contents);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this.emit('data', data);
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
es.merge(sequence(webpackStreams), patchFilesStream)
|
|
||||||
// .pipe(es.through(function (data) {
|
|
||||||
// // debug
|
|
||||||
// console.log('out', data.path, data.contents.length);
|
|
||||||
// this.emit('data', data);
|
|
||||||
// }))
|
|
||||||
.pipe(result);
|
|
||||||
}).catch(err => {
|
|
||||||
console.error(extensionPath);
|
|
||||||
console.error(packagedDependencies);
|
|
||||||
result.emit('error', err);
|
|
||||||
});
|
|
||||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
|
||||||
}
|
|
||||||
function fromLocalNormal(extensionPath) {
|
|
||||||
const result = es.through();
|
|
||||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||||
.then(fileNames => {
|
.then(function (fileNames) {
|
||||||
const files = fileNames
|
var files = fileNames
|
||||||
.map(fileName => path.join(extensionPath, fileName))
|
.map(function (fileName) { return path.join(extensionPath, fileName); })
|
||||||
.map(filePath => new File({
|
.map(function (filePath) { return new File({
|
||||||
path: filePath,
|
path: filePath,
|
||||||
stat: fs.statSync(filePath),
|
stat: fs.statSync(filePath),
|
||||||
base: extensionPath,
|
base: extensionPath,
|
||||||
contents: fs.createReadStream(filePath)
|
contents: fs.createReadStream(filePath)
|
||||||
}));
|
}); });
|
||||||
es.readArray(files).pipe(result);
|
es.readArray(files).pipe(result);
|
||||||
})
|
})
|
||||||
.catch(err => result.emit('error', err));
|
.catch(function (err) { return result.emit('error', err); });
|
||||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
return result;
|
||||||
}
|
}
|
||||||
const baseHeaders = {
|
exports.fromLocal = fromLocal;
|
||||||
|
function error(err) {
|
||||||
|
var result = es.through();
|
||||||
|
setTimeout(function () { return result.emit('error', err); });
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
var baseHeaders = {
|
||||||
'X-Market-Client-Id': 'VSCode Build',
|
'X-Market-Client-Id': 'VSCode Build',
|
||||||
'User-Agent': 'VSCode Build',
|
'User-Agent': 'VSCode Build',
|
||||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||||
};
|
};
|
||||||
function fromMarketplace(extensionName, version, metadata) {
|
function fromMarketplace(extensionName, version) {
|
||||||
const [publisher, name] = extensionName.split('.');
|
var filterType = 7;
|
||||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
var value = extensionName;
|
||||||
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
|
var criterium = { filterType: filterType, value: value };
|
||||||
const options = {
|
var criteria = [criterium];
|
||||||
base: url,
|
var pageNumber = 1;
|
||||||
|
var pageSize = 1;
|
||||||
|
var sortBy = 0;
|
||||||
|
var sortOrder = 0;
|
||||||
|
var flags = 0x1 | 0x2 | 0x80;
|
||||||
|
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
|
||||||
|
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
|
||||||
|
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
|
||||||
|
var headers = assign({}, baseHeaders, {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Accept': 'application/json;api-version=3.0-preview.1',
|
||||||
|
'Content-Length': body.length
|
||||||
|
});
|
||||||
|
var options = {
|
||||||
|
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
|
||||||
requestOptions: {
|
requestOptions: {
|
||||||
|
method: 'POST',
|
||||||
gzip: true,
|
gzip: true,
|
||||||
headers: baseHeaders
|
headers: headers,
|
||||||
|
body: body
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
const packageJsonFilter = filter('package.json', { restore: true });
|
return remote('/extensionquery', options)
|
||||||
return remote('', options)
|
.pipe(flatmap(function (stream, f) {
|
||||||
.pipe(vzip.src())
|
var rawResult = f.contents.toString('utf8');
|
||||||
.pipe(filter('extension/**'))
|
var result = JSON.parse(rawResult);
|
||||||
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
|
var extension = result.results[0].extensions[0];
|
||||||
.pipe(packageJsonFilter)
|
if (!extension) {
|
||||||
.pipe(buffer())
|
return error("No such extension: " + extension);
|
||||||
.pipe(json({ __metadata: metadata }))
|
}
|
||||||
.pipe(packageJsonFilter.restore);
|
var metadata = {
|
||||||
|
id: extension.extensionId,
|
||||||
|
publisherId: extension.publisher,
|
||||||
|
publisherDisplayName: extension.publisher.displayName
|
||||||
|
};
|
||||||
|
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
|
||||||
|
if (!extensionVersion) {
|
||||||
|
return error("No such extension version: " + extensionName + " @ " + version);
|
||||||
|
}
|
||||||
|
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
|
||||||
|
if (!asset) {
|
||||||
|
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
|
||||||
|
}
|
||||||
|
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
|
||||||
|
var options = {
|
||||||
|
base: asset.source,
|
||||||
|
requestOptions: {
|
||||||
|
gzip: true,
|
||||||
|
headers: baseHeaders
|
||||||
|
}
|
||||||
|
};
|
||||||
|
return remote('', options)
|
||||||
|
.pipe(flatmap(function (stream) {
|
||||||
|
var packageJsonFilter = filter('package.json', { restore: true });
|
||||||
|
return stream
|
||||||
|
.pipe(vzip.src())
|
||||||
|
.pipe(filter('extension/**'))
|
||||||
|
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
|
||||||
|
.pipe(packageJsonFilter)
|
||||||
|
.pipe(buffer())
|
||||||
|
.pipe(json({ __metadata: metadata }))
|
||||||
|
.pipe(packageJsonFilter.restore);
|
||||||
|
}));
|
||||||
|
}));
|
||||||
}
|
}
|
||||||
exports.fromMarketplace = fromMarketplace;
|
exports.fromMarketplace = fromMarketplace;
|
||||||
const excludedExtensions = [
|
|
||||||
'vscode-api-tests',
|
|
||||||
'vscode-colorize-tests',
|
|
||||||
'vscode-test-resolver',
|
|
||||||
'ms-vscode.node-debug',
|
|
||||||
'ms-vscode.node-debug2',
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
'integration-tests'
|
|
||||||
];
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const sqlBuiltInExtensions = [
|
|
||||||
// Add SQL built-in extensions here.
|
|
||||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
|
||||||
'admin-tool-ext-win',
|
|
||||||
'agent',
|
|
||||||
'import',
|
|
||||||
'profiler',
|
|
||||||
'admin-pack',
|
|
||||||
'big-data-cluster',
|
|
||||||
'dacpac',
|
|
||||||
'schema-compare',
|
|
||||||
'resource-deployment',
|
|
||||||
'cms'
|
|
||||||
];
|
|
||||||
const builtInExtensions = require('../builtInExtensions.json');
|
|
||||||
/**
|
|
||||||
* We're doing way too much stuff at once, with webpack et al. So much stuff
|
|
||||||
* that while downloading extensions from the marketplace, node js doesn't get enough
|
|
||||||
* stack frames to complete the download in under 2 minutes, at which point the
|
|
||||||
* marketplace server cuts off the http request. So, we sequentialize the extensino tasks.
|
|
||||||
*/
|
|
||||||
function sequence(streamProviders) {
|
|
||||||
const result = es.through();
|
|
||||||
function pop() {
|
|
||||||
if (streamProviders.length === 0) {
|
|
||||||
result.emit('end');
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
const fn = streamProviders.shift();
|
|
||||||
fn()
|
|
||||||
.on('end', function () { setTimeout(pop, 0); })
|
|
||||||
.pipe(result, { end: false });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pop();
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
function packageExtensionsStream(optsIn) {
|
|
||||||
const opts = optsIn || {};
|
|
||||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
|
||||||
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
|
|
||||||
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
|
|
||||||
return fromLocal(extension.path, opts.sourceMappingURLBase)
|
|
||||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
|
||||||
})]);
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const extensionDepsSrc = [
|
|
||||||
..._.flatten(extensionsProductionDependencies.map((d) => path.relative(root, d.path)).map((d) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
|
||||||
];
|
|
||||||
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
|
||||||
.pipe(filter(['**', '!**/package-lock.json']));
|
|
||||||
// Original code commented out here
|
|
||||||
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
|
|
||||||
// const marketplaceExtensions = () => es.merge(
|
|
||||||
// ...builtInExtensions
|
|
||||||
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
|
||||||
// .map(extension => {
|
|
||||||
// return fromMarketplace(extension.name, extension.version, extension.metadata)
|
|
||||||
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
|
||||||
// })
|
|
||||||
// );
|
|
||||||
return sequence([localExtensions, localExtensionDependencies,])
|
|
||||||
.pipe(util2.setExecutableBit(['**/*.sh']))
|
|
||||||
.pipe(filter(['**', '!**/*.js.map']));
|
|
||||||
// {{SQL CARBON EDIT}} - End
|
|
||||||
}
|
|
||||||
exports.packageExtensionsStream = packageExtensionsStream;
|
|
||||||
|
|||||||
@@ -4,232 +4,22 @@
|
|||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
import * as es from 'event-stream';
|
import * as es from 'event-stream';
|
||||||
import * as fs from 'fs';
|
|
||||||
import * as glob from 'glob';
|
|
||||||
import * as gulp from 'gulp';
|
|
||||||
import * as path from 'path';
|
|
||||||
import { Stream } from 'stream';
|
import { Stream } from 'stream';
|
||||||
import * as File from 'vinyl';
|
import assign = require('object-assign');
|
||||||
import * as vsce from 'vsce';
|
|
||||||
import { createStatsStream } from './stats';
|
|
||||||
import * as util2 from './util';
|
|
||||||
import remote = require('gulp-remote-src');
|
import remote = require('gulp-remote-src');
|
||||||
|
const flatmap = require('gulp-flatmap');
|
||||||
const vzip = require('gulp-vinyl-zip');
|
const vzip = require('gulp-vinyl-zip');
|
||||||
import filter = require('gulp-filter');
|
const filter = require('gulp-filter');
|
||||||
import rename = require('gulp-rename');
|
const rename = require('gulp-rename');
|
||||||
import * as fancyLog from 'fancy-log';
|
const util = require('gulp-util');
|
||||||
import * as ansiColors from 'ansi-colors';
|
|
||||||
const buffer = require('gulp-buffer');
|
const buffer = require('gulp-buffer');
|
||||||
import json = require('gulp-json-editor');
|
const json = require('gulp-json-editor');
|
||||||
const webpack = require('webpack');
|
import * as fs from 'fs';
|
||||||
const webpackGulp = require('webpack-stream');
|
import * as path from 'path';
|
||||||
|
import * as vsce from 'vsce';
|
||||||
|
import * as File from 'vinyl';
|
||||||
|
|
||||||
const root = path.resolve(path.join(__dirname, '..', '..'));
|
export function fromLocal(extensionPath: string): Stream {
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
import * as _ from 'underscore';
|
|
||||||
import * as vfs from 'vinyl-fs';
|
|
||||||
const deps = require('../dependencies');
|
|
||||||
const extensionsRoot = path.join(root, 'extensions');
|
|
||||||
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
|
|
||||||
|
|
||||||
export function packageBuiltInExtensions() {
|
|
||||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
|
||||||
const visxDirectory = path.join(path.dirname(root), 'vsix');
|
|
||||||
try {
|
|
||||||
if (!fs.existsSync(visxDirectory)) {
|
|
||||||
fs.mkdirSync(visxDirectory);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
// don't fail the build if the output directory already exists
|
|
||||||
console.warn(err);
|
|
||||||
}
|
|
||||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
|
||||||
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
|
|
||||||
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
|
||||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
|
||||||
vsce.createVSIX({
|
|
||||||
cwd: element.path,
|
|
||||||
packagePath: packagePath,
|
|
||||||
useYarn: true
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
|
|
||||||
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
|
||||||
if (platform === 'darwin') {
|
|
||||||
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
|
|
||||||
} else {
|
|
||||||
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
|
|
||||||
}
|
|
||||||
|
|
||||||
platform = platform || process.platform;
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
const root = path.resolve(path.join(__dirname, '../..'));
|
|
||||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => extensionName === name);
|
|
||||||
|
|
||||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
|
||||||
return fromLocal(extension.path);
|
|
||||||
}));
|
|
||||||
|
|
||||||
let result = localExtensions
|
|
||||||
.pipe(util2.skipDirectories())
|
|
||||||
.pipe(util2.fixWin32DirectoryPermissions())
|
|
||||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
|
||||||
|
|
||||||
return result.pipe(vfs.dest(destination));
|
|
||||||
};
|
|
||||||
}
|
|
||||||
// {{SQL CARBON EDIT}} - End
|
|
||||||
|
|
||||||
function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
|
|
||||||
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
|
|
||||||
if (fs.existsSync(webpackFilename)) {
|
|
||||||
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
|
|
||||||
} else {
|
|
||||||
return fromLocalNormal(extensionPath);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
|
|
||||||
const result = es.through();
|
|
||||||
|
|
||||||
const packagedDependencies: string[] = [];
|
|
||||||
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
|
|
||||||
if (packageJsonConfig.dependencies) {
|
|
||||||
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
|
|
||||||
for (const key in webpackRootConfig.externals) {
|
|
||||||
if (key in packageJsonConfig.dependencies) {
|
|
||||||
packagedDependencies.push(key);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
|
||||||
const files = fileNames
|
|
||||||
.map(fileName => path.join(extensionPath, fileName))
|
|
||||||
.map(filePath => new File({
|
|
||||||
path: filePath,
|
|
||||||
stat: fs.statSync(filePath),
|
|
||||||
base: extensionPath,
|
|
||||||
contents: fs.createReadStream(filePath) as any
|
|
||||||
}));
|
|
||||||
|
|
||||||
const filesStream = es.readArray(files);
|
|
||||||
|
|
||||||
// check for a webpack configuration files, then invoke webpack
|
|
||||||
// and merge its output with the files stream. also rewrite the package.json
|
|
||||||
// file to a new entry point
|
|
||||||
const webpackConfigLocations = (<string[]>glob.sync(
|
|
||||||
path.join(extensionPath, '/**/extension.webpack.config.js'),
|
|
||||||
{ ignore: ['**/node_modules'] }
|
|
||||||
));
|
|
||||||
|
|
||||||
const packageJsonFilter = filter(f => {
|
|
||||||
if (path.basename(f.path) === 'package.json') {
|
|
||||||
// only modify package.json's next to the webpack file.
|
|
||||||
// to be safe, use existsSync instead of path comparison.
|
|
||||||
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}, { restore: true });
|
|
||||||
|
|
||||||
const patchFilesStream = filesStream
|
|
||||||
.pipe(packageJsonFilter)
|
|
||||||
.pipe(buffer())
|
|
||||||
.pipe(json((data: any) => {
|
|
||||||
if (data.main) {
|
|
||||||
// hardcoded entry point directory!
|
|
||||||
data.main = data.main.replace('/out/', /dist/);
|
|
||||||
}
|
|
||||||
return data;
|
|
||||||
}))
|
|
||||||
.pipe(packageJsonFilter.restore);
|
|
||||||
|
|
||||||
|
|
||||||
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
|
|
||||||
|
|
||||||
const webpackDone = (err: any, stats: any) => {
|
|
||||||
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
|
|
||||||
if (err) {
|
|
||||||
result.emit('error', err);
|
|
||||||
}
|
|
||||||
const { compilation } = stats;
|
|
||||||
if (compilation.errors.length > 0) {
|
|
||||||
result.emit('error', compilation.errors.join('\n'));
|
|
||||||
}
|
|
||||||
if (compilation.warnings.length > 0) {
|
|
||||||
result.emit('error', compilation.warnings.join('\n'));
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const webpackConfig = {
|
|
||||||
...require(webpackConfigPath),
|
|
||||||
...{ mode: 'production' }
|
|
||||||
};
|
|
||||||
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
|
||||||
|
|
||||||
return webpackGulp(webpackConfig, webpack, webpackDone)
|
|
||||||
.pipe(es.through(function (data) {
|
|
||||||
data.stat = data.stat || {};
|
|
||||||
data.base = extensionPath;
|
|
||||||
this.emit('data', data);
|
|
||||||
}))
|
|
||||||
.pipe(es.through(function (data: File) {
|
|
||||||
// source map handling:
|
|
||||||
// * rewrite sourceMappingURL
|
|
||||||
// * save to disk so that upload-task picks this up
|
|
||||||
if (sourceMappingURLBase) {
|
|
||||||
const contents = (<Buffer>data.contents).toString('utf8');
|
|
||||||
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
|
|
||||||
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
|
|
||||||
}), 'utf8');
|
|
||||||
|
|
||||||
if (/\.js\.map$/.test(data.path)) {
|
|
||||||
if (!fs.existsSync(path.dirname(data.path))) {
|
|
||||||
fs.mkdirSync(path.dirname(data.path));
|
|
||||||
}
|
|
||||||
fs.writeFileSync(data.path, data.contents);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
this.emit('data', data);
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
|
|
||||||
es.merge(sequence(webpackStreams), patchFilesStream)
|
|
||||||
// .pipe(es.through(function (data) {
|
|
||||||
// // debug
|
|
||||||
// console.log('out', data.path, data.contents.length);
|
|
||||||
// this.emit('data', data);
|
|
||||||
// }))
|
|
||||||
.pipe(result);
|
|
||||||
|
|
||||||
}).catch(err => {
|
|
||||||
console.error(extensionPath);
|
|
||||||
console.error(packagedDependencies);
|
|
||||||
result.emit('error', err);
|
|
||||||
});
|
|
||||||
|
|
||||||
return result.pipe(createStatsStream(path.basename(extensionPath)));
|
|
||||||
}
|
|
||||||
|
|
||||||
function fromLocalNormal(extensionPath: string): Stream {
|
|
||||||
const result = es.through();
|
const result = es.through();
|
||||||
|
|
||||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||||
@@ -247,7 +37,13 @@ function fromLocalNormal(extensionPath: string): Stream {
|
|||||||
})
|
})
|
||||||
.catch(err => result.emit('error', err));
|
.catch(err => result.emit('error', err));
|
||||||
|
|
||||||
return result.pipe(createStatsStream(path.basename(extensionPath)));
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
function error(err: any): Stream {
|
||||||
|
const result = es.through();
|
||||||
|
setTimeout(() => result.emit('error', err));
|
||||||
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
const baseHeaders = {
|
const baseHeaders = {
|
||||||
@@ -256,142 +52,82 @@ const baseHeaders = {
|
|||||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||||
};
|
};
|
||||||
|
|
||||||
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
|
export function fromMarketplace(extensionName: string, version: string): Stream {
|
||||||
const [publisher, name] = extensionName.split('.');
|
const filterType = 7;
|
||||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
const value = extensionName;
|
||||||
|
const criterium = { filterType, value };
|
||||||
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
|
const criteria = [criterium];
|
||||||
|
const pageNumber = 1;
|
||||||
|
const pageSize = 1;
|
||||||
|
const sortBy = 0;
|
||||||
|
const sortOrder = 0;
|
||||||
|
const flags = 0x1 | 0x2 | 0x80;
|
||||||
|
const assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
|
||||||
|
const filters = [{ criteria, pageNumber, pageSize, sortBy, sortOrder }];
|
||||||
|
const body = JSON.stringify({ filters, assetTypes, flags });
|
||||||
|
const headers: any = assign({}, baseHeaders, {
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Accept': 'application/json;api-version=3.0-preview.1',
|
||||||
|
'Content-Length': body.length
|
||||||
|
});
|
||||||
|
|
||||||
const options = {
|
const options = {
|
||||||
base: url,
|
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
|
||||||
requestOptions: {
|
requestOptions: {
|
||||||
|
method: 'POST',
|
||||||
gzip: true,
|
gzip: true,
|
||||||
headers: baseHeaders
|
headers,
|
||||||
|
body: body
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
const packageJsonFilter = filter('package.json', { restore: true });
|
return remote('/extensionquery', options)
|
||||||
|
.pipe(flatmap((stream, f) => {
|
||||||
|
const rawResult = f.contents.toString('utf8');
|
||||||
|
const result = JSON.parse(rawResult);
|
||||||
|
const extension = result.results[0].extensions[0];
|
||||||
|
if (!extension) {
|
||||||
|
return error(`No such extension: ${extension}`);
|
||||||
|
}
|
||||||
|
|
||||||
return remote('', options)
|
const metadata = {
|
||||||
.pipe(vzip.src())
|
id: extension.extensionId,
|
||||||
.pipe(filter('extension/**'))
|
publisherId: extension.publisher,
|
||||||
.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, '')))
|
publisherDisplayName: extension.publisher.displayName
|
||||||
.pipe(packageJsonFilter)
|
};
|
||||||
.pipe(buffer())
|
|
||||||
.pipe(json({ __metadata: metadata }))
|
const extensionVersion = extension.versions.filter(v => v.version === version)[0];
|
||||||
.pipe(packageJsonFilter.restore);
|
if (!extensionVersion) {
|
||||||
}
|
return error(`No such extension version: ${extensionName} @ ${version}`);
|
||||||
|
}
|
||||||
interface IPackageExtensionsOptions {
|
|
||||||
/**
|
const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
|
||||||
* Set to undefined to package all of them.
|
if (!asset) {
|
||||||
*/
|
return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
|
||||||
desiredExtensions?: string[];
|
}
|
||||||
sourceMappingURLBase?: string;
|
|
||||||
}
|
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
|
||||||
|
|
||||||
const excludedExtensions = [
|
const options = {
|
||||||
'vscode-api-tests',
|
base: asset.source,
|
||||||
'vscode-colorize-tests',
|
requestOptions: {
|
||||||
'vscode-test-resolver',
|
gzip: true,
|
||||||
'ms-vscode.node-debug',
|
headers: baseHeaders
|
||||||
'ms-vscode.node-debug2',
|
}
|
||||||
// {{SQL CARBON EDIT}}
|
};
|
||||||
'integration-tests'
|
|
||||||
];
|
return remote('', options)
|
||||||
|
.pipe(flatmap(stream => {
|
||||||
// {{SQL CARBON EDIT}}
|
const packageJsonFilter = filter('package.json', { restore: true });
|
||||||
const sqlBuiltInExtensions = [
|
|
||||||
// Add SQL built-in extensions here.
|
return stream
|
||||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
.pipe(vzip.src())
|
||||||
'admin-tool-ext-win',
|
.pipe(filter('extension/**'))
|
||||||
'agent',
|
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
|
||||||
'import',
|
.pipe(packageJsonFilter)
|
||||||
'profiler',
|
.pipe(buffer())
|
||||||
'admin-pack',
|
.pipe(json({ __metadata: metadata }))
|
||||||
'big-data-cluster',
|
.pipe(packageJsonFilter.restore);
|
||||||
'dacpac',
|
}));
|
||||||
'schema-compare',
|
}));
|
||||||
'resource-deployment',
|
|
||||||
'cms'
|
|
||||||
];
|
|
||||||
// {{SQL CARBON EDIT}} - End
|
|
||||||
|
|
||||||
interface IBuiltInExtension {
|
|
||||||
name: string;
|
|
||||||
version: string;
|
|
||||||
repo: string;
|
|
||||||
metadata: any;
|
|
||||||
}
|
|
||||||
|
|
||||||
const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');
|
|
||||||
|
|
||||||
/**
|
|
||||||
* We're doing way too much stuff at once, with webpack et al. So much stuff
|
|
||||||
* that while downloading extensions from the marketplace, node js doesn't get enough
|
|
||||||
* stack frames to complete the download in under 2 minutes, at which point the
|
|
||||||
* marketplace server cuts off the http request. So, we sequentialize the extensino tasks.
|
|
||||||
*/
|
|
||||||
function sequence(streamProviders: { (): Stream }[]): Stream {
|
|
||||||
const result = es.through();
|
|
||||||
|
|
||||||
function pop() {
|
|
||||||
if (streamProviders.length === 0) {
|
|
||||||
result.emit('end');
|
|
||||||
} else {
|
|
||||||
const fn = streamProviders.shift()!;
|
|
||||||
fn()
|
|
||||||
.on('end', function () { setTimeout(pop, 0); })
|
|
||||||
.pipe(result, { end: false });
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pop();
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
|
|
||||||
const opts = optsIn || {};
|
|
||||||
|
|
||||||
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
|
||||||
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
|
|
||||||
|
|
||||||
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
|
|
||||||
return fromLocal(extension.path, opts.sourceMappingURLBase)
|
|
||||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
|
||||||
})]);
|
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const extensionDepsSrc = [
|
|
||||||
..._.flatten(extensionsProductionDependencies.map((d: any) => path.relative(root, d.path)).map((d: any) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
|
||||||
];
|
|
||||||
|
|
||||||
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
|
||||||
.pipe(filter(['**', '!**/package-lock.json']))
|
|
||||||
|
|
||||||
// Original code commented out here
|
|
||||||
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
|
|
||||||
|
|
||||||
// const marketplaceExtensions = () => es.merge(
|
|
||||||
// ...builtInExtensions
|
|
||||||
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
|
||||||
// .map(extension => {
|
|
||||||
// return fromMarketplace(extension.name, extension.version, extension.metadata)
|
|
||||||
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
|
||||||
// })
|
|
||||||
// );
|
|
||||||
|
|
||||||
return sequence([localExtensions, localExtensionDependencies, /*marketplaceExtensions*/])
|
|
||||||
.pipe(util2.setExecutableBit(['**/*.sh']))
|
|
||||||
.pipe(filter(['**', '!**/*.js.map']));
|
|
||||||
// {{SQL CARBON EDIT}} - End
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -4,47 +4,47 @@
|
|||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
'use strict';
|
'use strict';
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const path = require("path");
|
var path = require("path");
|
||||||
const fs = require("fs");
|
var fs = require("fs");
|
||||||
/**
|
/**
|
||||||
* Returns the sha1 commit version of a repository or undefined in case of failure.
|
* Returns the sha1 commit version of a repository or undefined in case of failure.
|
||||||
*/
|
*/
|
||||||
function getVersion(repo) {
|
function getVersion(repo) {
|
||||||
const git = path.join(repo, '.git');
|
var git = path.join(repo, '.git');
|
||||||
const headPath = path.join(git, 'HEAD');
|
var headPath = path.join(git, 'HEAD');
|
||||||
let head;
|
var head;
|
||||||
try {
|
try {
|
||||||
head = fs.readFileSync(headPath, 'utf8').trim();
|
head = fs.readFileSync(headPath, 'utf8').trim();
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
return undefined;
|
return void 0;
|
||||||
}
|
}
|
||||||
if (/^[0-9a-f]{40}$/i.test(head)) {
|
if (/^[0-9a-f]{40}$/i.test(head)) {
|
||||||
return head;
|
return head;
|
||||||
}
|
}
|
||||||
const refMatch = /^ref: (.*)$/.exec(head);
|
var refMatch = /^ref: (.*)$/.exec(head);
|
||||||
if (!refMatch) {
|
if (!refMatch) {
|
||||||
return undefined;
|
return void 0;
|
||||||
}
|
}
|
||||||
const ref = refMatch[1];
|
var ref = refMatch[1];
|
||||||
const refPath = path.join(git, ref);
|
var refPath = path.join(git, ref);
|
||||||
try {
|
try {
|
||||||
return fs.readFileSync(refPath, 'utf8').trim();
|
return fs.readFileSync(refPath, 'utf8').trim();
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
// noop
|
// noop
|
||||||
}
|
}
|
||||||
const packedRefsPath = path.join(git, 'packed-refs');
|
var packedRefsPath = path.join(git, 'packed-refs');
|
||||||
let refsRaw;
|
var refsRaw;
|
||||||
try {
|
try {
|
||||||
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
|
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
|
||||||
}
|
}
|
||||||
catch (e) {
|
catch (e) {
|
||||||
return undefined;
|
return void 0;
|
||||||
}
|
}
|
||||||
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
||||||
let refsMatch;
|
var refsMatch;
|
||||||
let refs = {};
|
var refs = {};
|
||||||
while (refsMatch = refsRegex.exec(refsRaw)) {
|
while (refsMatch = refsRegex.exec(refsRaw)) {
|
||||||
refs[refsMatch[2]] = refsMatch[1];
|
refs[refsMatch[2]] = refsMatch[1];
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -10,7 +10,7 @@ import * as fs from 'fs';
|
|||||||
/**
|
/**
|
||||||
* Returns the sha1 commit version of a repository or undefined in case of failure.
|
* Returns the sha1 commit version of a repository or undefined in case of failure.
|
||||||
*/
|
*/
|
||||||
export function getVersion(repo: string): string | undefined {
|
export function getVersion(repo: string): string {
|
||||||
const git = path.join(repo, '.git');
|
const git = path.join(repo, '.git');
|
||||||
const headPath = path.join(git, 'HEAD');
|
const headPath = path.join(git, 'HEAD');
|
||||||
let head: string;
|
let head: string;
|
||||||
@@ -18,7 +18,7 @@ export function getVersion(repo: string): string | undefined {
|
|||||||
try {
|
try {
|
||||||
head = fs.readFileSync(headPath, 'utf8').trim();
|
head = fs.readFileSync(headPath, 'utf8').trim();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
return undefined;
|
return void 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (/^[0-9a-f]{40}$/i.test(head)) {
|
if (/^[0-9a-f]{40}$/i.test(head)) {
|
||||||
@@ -28,7 +28,7 @@ export function getVersion(repo: string): string | undefined {
|
|||||||
const refMatch = /^ref: (.*)$/.exec(head);
|
const refMatch = /^ref: (.*)$/.exec(head);
|
||||||
|
|
||||||
if (!refMatch) {
|
if (!refMatch) {
|
||||||
return undefined;
|
return void 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
const ref = refMatch[1];
|
const ref = refMatch[1];
|
||||||
@@ -46,11 +46,11 @@ export function getVersion(repo: string): string | undefined {
|
|||||||
try {
|
try {
|
||||||
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
|
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
return undefined;
|
return void 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
||||||
let refsMatch: RegExpExecArray | null;
|
let refsMatch: RegExpExecArray;
|
||||||
let refs: { [ref: string]: string } = {};
|
let refs: { [ref: string]: string } = {};
|
||||||
|
|
||||||
while (refsMatch = refsRegex.exec(refsRaw)) {
|
while (refsMatch = refsRegex.exec(refsRaw)) {
|
||||||
|
|||||||
File diff suppressed because it is too large
Load Diff
@@ -27,159 +27,135 @@
|
|||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/api/common",
|
"name": "vs/workbench/parts/cli",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/cli",
|
"name": "vs/workbench/parts/codeEditor",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/codeEditor",
|
"name": "vs/workbench/parts/comments",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/codeinset",
|
"name": "vs/workbench/parts/debug",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/callHierarchy",
|
"name": "vs/workbench/parts/emmet",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/comments",
|
"name": "vs/workbench/parts/execution",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/debug",
|
"name": "vs/workbench/parts/extensions",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/emmet",
|
"name": "vs/workbench/parts/feedback",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/extensions",
|
"name": "vs/workbench/parts/files",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/externalTerminal",
|
"name": "vs/workbench/parts/html",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/feedback",
|
"name": "vs/workbench/parts/markers",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/files",
|
"name": "vs/workbench/parts/localizations",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/html",
|
"name": "vs/workbench/parts/logs",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/issue",
|
"name": "vs/workbench/parts/navigation",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/markers",
|
"name": "vs/workbench/parts/output",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/localizations",
|
"name": "vs/workbench/parts/performance",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/logs",
|
"name": "vs/workbench/parts/preferences",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/output",
|
"name": "vs/workbench/parts/quickopen",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/performance",
|
"name": "vs/workbench/parts/relauncher",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/preferences",
|
"name": "vs/workbench/parts/scm",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/quickopen",
|
"name": "vs/workbench/parts/search",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/remote",
|
"name": "vs/workbench/parts/snippets",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/relauncher",
|
"name": "vs/workbench/parts/surveys",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/scm",
|
"name": "vs/workbench/parts/tasks",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/search",
|
"name": "vs/workbench/parts/terminal",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/snippets",
|
"name": "vs/workbench/parts/themes",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/format",
|
"name": "vs/workbench/parts/trust",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/stats",
|
"name": "vs/workbench/parts/update",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/surveys",
|
"name": "vs/workbench/parts/url",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/tasks",
|
"name": "vs/workbench/parts/watermark",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/terminal",
|
"name": "vs/workbench/parts/webview",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/themes",
|
"name": "vs/workbench/parts/welcome",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/trust",
|
"name": "vs/workbench/parts/outline",
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "vs/workbench/contrib/update",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "vs/workbench/contrib/url",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "vs/workbench/contrib/watermark",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "vs/workbench/contrib/webview",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "vs/workbench/contrib/welcome",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "vs/workbench/contrib/outline",
|
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -190,10 +166,6 @@
|
|||||||
"name": "vs/workbench/services/bulkEdit",
|
"name": "vs/workbench/services/bulkEdit",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"name": "vs/workbench/services/commands",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/configuration",
|
"name": "vs/workbench/services/configuration",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -219,17 +191,13 @@
|
|||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/extensionManagement",
|
"name": "vs/workbench/services/jsonschemas",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/files",
|
"name": "vs/workbench/services/files",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"name": "vs/workbench/services/integrity",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/keybinding",
|
"name": "vs/workbench/services/keybinding",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -242,10 +210,6 @@
|
|||||||
"name": "vs/workbench/services/progress",
|
"name": "vs/workbench/services/progress",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"name": "vs/workbench/services/remote",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/textfile",
|
"name": "vs/workbench/services/textfile",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -266,10 +230,6 @@
|
|||||||
"name": "vs/workbench/services/decorations",
|
"name": "vs/workbench/services/decorations",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"name": "vs/workbench/services/label",
|
|
||||||
"project": "vscode-workbench"
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/preferences",
|
"name": "vs/workbench/services/preferences",
|
||||||
"project": "vscode-preferences"
|
"project": "vscode-preferences"
|
||||||
|
|||||||
@@ -7,25 +7,25 @@ import * as path from 'path';
|
|||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
|
|
||||||
import { through, readable, ThroughStream } from 'event-stream';
|
import { through, readable, ThroughStream } from 'event-stream';
|
||||||
import * as File from 'vinyl';
|
import File = require('vinyl');
|
||||||
import * as Is from 'is';
|
import * as Is from 'is';
|
||||||
import * as xml2js from 'xml2js';
|
import * as xml2js from 'xml2js';
|
||||||
import * as glob from 'glob';
|
import * as glob from 'glob';
|
||||||
import * as https from 'https';
|
import * as https from 'https';
|
||||||
import * as gulp from 'gulp';
|
import * as gulp from 'gulp';
|
||||||
import * as fancyLog from 'fancy-log';
|
|
||||||
import * as ansiColors from 'ansi-colors';
|
var util = require('gulp-util');
|
||||||
import * as iconv from 'iconv-lite';
|
var iconv = require('iconv-lite');
|
||||||
|
|
||||||
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
|
const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;
|
||||||
|
|
||||||
function log(message: any, ...rest: any[]): void {
|
function log(message: any, ...rest: any[]): void {
|
||||||
fancyLog(ansiColors.green('[i18n]'), message, ...rest);
|
util.log(util.colors.green('[i18n]'), message, ...rest);
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface Language {
|
export interface Language {
|
||||||
id: string; // language id, e.g. zh-tw, de
|
id: string; // laguage id, e.g. zh-tw, de
|
||||||
translationId?: string; // language id used in translation tools, e.g zh-hant, de (optional, if not set, the id is used)
|
transifexId?: string; // language id used in transifex, e.g zh-hant, de (optional, if not set, the id is used)
|
||||||
folderName?: string; // language specific folder name, e.g. cht, deu (optional, if not set, the id is used)
|
folderName?: string; // language specific folder name, e.g. cht, deu (optional, if not set, the id is used)
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -38,8 +38,8 @@ export interface InnoSetup {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export const defaultLanguages: Language[] = [
|
export const defaultLanguages: Language[] = [
|
||||||
{ id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' },
|
{ id: 'zh-tw', folderName: 'cht', transifexId: 'zh-hant' },
|
||||||
{ id: 'zh-cn', folderName: 'chs', translationId: 'zh-hans' },
|
{ id: 'zh-cn', folderName: 'chs', transifexId: 'zh-hans' },
|
||||||
{ id: 'ja', folderName: 'jpn' },
|
{ id: 'ja', folderName: 'jpn' },
|
||||||
{ id: 'ko', folderName: 'kor' },
|
{ id: 'ko', folderName: 'kor' },
|
||||||
{ id: 'de', folderName: 'deu' },
|
{ id: 'de', folderName: 'deu' },
|
||||||
@@ -57,7 +57,7 @@ export const extraLanguages: Language[] = [
|
|||||||
];
|
];
|
||||||
|
|
||||||
// non built-in extensions also that are transifex and need to be part of the language packs
|
// non built-in extensions also that are transifex and need to be part of the language packs
|
||||||
export const externalExtensionsWithTranslations = {
|
const externalExtensionsWithTranslations = {
|
||||||
'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
|
'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
|
||||||
'vscode-node-debug': 'ms-vscode.node-debug',
|
'vscode-node-debug': 'ms-vscode.node-debug',
|
||||||
'vscode-node-debug2': 'ms-vscode.node-debug2'
|
'vscode-node-debug2': 'ms-vscode.node-debug2'
|
||||||
@@ -71,7 +71,7 @@ interface Map<V> {
|
|||||||
interface Item {
|
interface Item {
|
||||||
id: string;
|
id: string;
|
||||||
message: string;
|
message: string;
|
||||||
comment?: string;
|
comment: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface Resource {
|
export interface Resource {
|
||||||
@@ -137,6 +137,27 @@ module PackageJsonFormat {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
interface ModuleJsonFormat {
|
||||||
|
messages: string[];
|
||||||
|
keys: (string | LocalizeInfo)[];
|
||||||
|
}
|
||||||
|
|
||||||
|
module ModuleJsonFormat {
|
||||||
|
export function is(value: any): value is ModuleJsonFormat {
|
||||||
|
let candidate = value as ModuleJsonFormat;
|
||||||
|
return Is.defined(candidate)
|
||||||
|
&& Is.array(candidate.messages) && candidate.messages.every(message => Is.string(message))
|
||||||
|
&& Is.array(candidate.keys) && candidate.keys.every(key => Is.string(key) || LocalizeInfo.is(key));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
interface BundledExtensionHeaderFormat {
|
||||||
|
id: string;
|
||||||
|
type: string;
|
||||||
|
hash: string;
|
||||||
|
outDir: string;
|
||||||
|
}
|
||||||
|
|
||||||
interface BundledExtensionFormat {
|
interface BundledExtensionFormat {
|
||||||
[key: string]: {
|
[key: string]: {
|
||||||
messages: string[];
|
messages: string[];
|
||||||
@@ -144,19 +165,10 @@ interface BundledExtensionFormat {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
interface I18nFormat {
|
|
||||||
version: string;
|
|
||||||
contents: {
|
|
||||||
[module: string]: {
|
|
||||||
[messageKey: string]: string;
|
|
||||||
};
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export class Line {
|
export class Line {
|
||||||
private buffer: string[] = [];
|
private buffer: string[] = [];
|
||||||
|
|
||||||
constructor(indent: number = 0) {
|
constructor(private indent: number = 0) {
|
||||||
if (indent > 0) {
|
if (indent > 0) {
|
||||||
this.buffer.push(new Array(indent + 1).join(' '));
|
this.buffer.push(new Array(indent + 1).join(' '));
|
||||||
}
|
}
|
||||||
@@ -223,8 +235,8 @@ export class XLF {
|
|||||||
let existingKeys = new Set<string>();
|
let existingKeys = new Set<string>();
|
||||||
for (let i = 0; i < keys.length; i++) {
|
for (let i = 0; i < keys.length; i++) {
|
||||||
let key = keys[i];
|
let key = keys[i];
|
||||||
let realKey: string | undefined;
|
let realKey: string;
|
||||||
let comment: string | undefined;
|
let comment: string;
|
||||||
if (Is.string(key)) {
|
if (Is.string(key)) {
|
||||||
realKey = key;
|
realKey = key;
|
||||||
comment = undefined;
|
comment = undefined;
|
||||||
@@ -274,17 +286,17 @@ export class XLF {
|
|||||||
}
|
}
|
||||||
|
|
||||||
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
|
static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
|
||||||
return new Promise((resolve) => {
|
return new Promise((resolve, reject) => {
|
||||||
let parser = new xml2js.Parser();
|
let parser = new xml2js.Parser();
|
||||||
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
|
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
|
||||||
parser.parseString(xlfString, function (_err: any, result: any) {
|
parser.parseString(xlfString, function (err, result) {
|
||||||
const fileNodes: any[] = result['xliff']['file'];
|
const fileNodes: any[] = result['xliff']['file'];
|
||||||
fileNodes.forEach(file => {
|
fileNodes.forEach(file => {
|
||||||
const originalFilePath = file.$.original;
|
const originalFilePath = file.$.original;
|
||||||
const messages: Map<string> = {};
|
const messages: Map<string> = {};
|
||||||
const transUnits = file.body[0]['trans-unit'];
|
const transUnits = file.body[0]['trans-unit'];
|
||||||
if (transUnits) {
|
if (transUnits) {
|
||||||
transUnits.forEach((unit: any) => {
|
transUnits.forEach(unit => {
|
||||||
const key = unit.$.id;
|
const key = unit.$.id;
|
||||||
const val = pseudify(unit.source[0]['_'].toString());
|
const val = pseudify(unit.source[0]['_'].toString());
|
||||||
if (key && val) {
|
if (key && val) {
|
||||||
@@ -305,7 +317,7 @@ export class XLF {
|
|||||||
|
|
||||||
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
|
let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
|
||||||
|
|
||||||
parser.parseString(xlfString, function (err: any, result: any) {
|
parser.parseString(xlfString, function (err, result) {
|
||||||
if (err) {
|
if (err) {
|
||||||
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
|
reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
|
||||||
}
|
}
|
||||||
@@ -328,20 +340,17 @@ export class XLF {
|
|||||||
|
|
||||||
const transUnits = file.body[0]['trans-unit'];
|
const transUnits = file.body[0]['trans-unit'];
|
||||||
if (transUnits) {
|
if (transUnits) {
|
||||||
transUnits.forEach((unit: any) => {
|
transUnits.forEach(unit => {
|
||||||
const key = unit.$.id;
|
const key = unit.$.id;
|
||||||
if (!unit.target) {
|
if (!unit.target) {
|
||||||
return; // No translation available
|
return; // No translation available
|
||||||
}
|
}
|
||||||
|
|
||||||
let val = unit.target[0];
|
const val = unit.target.toString();
|
||||||
if (typeof val !== 'string') {
|
|
||||||
val = val._;
|
|
||||||
}
|
|
||||||
if (key && val) {
|
if (key && val) {
|
||||||
messages[key] = decodeEntities(val);
|
messages[key] = decodeEntities(val);
|
||||||
} else {
|
} else {
|
||||||
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
|
reject(new Error(`XLF parsing error: XLIFF file does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
|
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
|
||||||
@@ -360,7 +369,7 @@ export interface ITask<T> {
|
|||||||
|
|
||||||
interface ILimitedTaskFactory<T> {
|
interface ILimitedTaskFactory<T> {
|
||||||
factory: ITask<Promise<T>>;
|
factory: ITask<Promise<T>>;
|
||||||
c: (value?: T | Promise<T>) => void;
|
c: (value?: T | Thenable<T>) => void;
|
||||||
e: (error?: any) => void;
|
e: (error?: any) => void;
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -382,7 +391,7 @@ export class Limiter<T> {
|
|||||||
|
|
||||||
private consume(): void {
|
private consume(): void {
|
||||||
while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
|
while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
|
||||||
const iLimitedTask = this.outstandingPromises.shift()!;
|
const iLimitedTask = this.outstandingPromises.shift();
|
||||||
this.runningPromises++;
|
this.runningPromises++;
|
||||||
|
|
||||||
const promise = iLimitedTask.factory();
|
const promise = iLimitedTask.factory();
|
||||||
@@ -410,8 +419,8 @@ function stripComments(content: string): string {
|
|||||||
* Third matches block comments
|
* Third matches block comments
|
||||||
* Fourth matches line comments
|
* Fourth matches line comments
|
||||||
*/
|
*/
|
||||||
const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
|
var regexp: RegExp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
|
||||||
let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
|
let result = content.replace(regexp, (match, m1, m2, m3, m4) => {
|
||||||
// Only one of m1, m2, m3, m4 matches
|
// Only one of m1, m2, m3, m4 matches
|
||||||
if (m3) {
|
if (m3) {
|
||||||
// A block comment. Replace with nothing
|
// A block comment. Replace with nothing
|
||||||
@@ -433,9 +442,9 @@ function stripComments(content: string): string {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function escapeCharacters(value: string): string {
|
function escapeCharacters(value: string): string {
|
||||||
const result: string[] = [];
|
var result: string[] = [];
|
||||||
for (let i = 0; i < value.length; i++) {
|
for (var i = 0; i < value.length; i++) {
|
||||||
const ch = value.charAt(i);
|
var ch = value.charAt(i);
|
||||||
switch (ch) {
|
switch (ch) {
|
||||||
case '\'':
|
case '\'':
|
||||||
result.push('\\\'');
|
result.push('\\\'');
|
||||||
@@ -475,6 +484,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
|
|||||||
|
|
||||||
let statistics: Map<number> = Object.create(null);
|
let statistics: Map<number> = Object.create(null);
|
||||||
|
|
||||||
|
let total: number = 0;
|
||||||
let defaultMessages: Map<Map<string>> = Object.create(null);
|
let defaultMessages: Map<Map<string>> = Object.create(null);
|
||||||
let modules = Object.keys(keysSection);
|
let modules = Object.keys(keysSection);
|
||||||
modules.forEach((module) => {
|
modules.forEach((module) => {
|
||||||
@@ -487,6 +497,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
|
|||||||
let messageMap: Map<string> = Object.create(null);
|
let messageMap: Map<string> = Object.create(null);
|
||||||
defaultMessages[module] = messageMap;
|
defaultMessages[module] = messageMap;
|
||||||
keys.map((key, i) => {
|
keys.map((key, i) => {
|
||||||
|
total++;
|
||||||
if (typeof key === 'string') {
|
if (typeof key === 'string') {
|
||||||
messageMap[key] = messages[i];
|
messageMap[key] = messages[i];
|
||||||
} else {
|
} else {
|
||||||
@@ -495,11 +506,7 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
|
let languageDirectory = path.join(__dirname, '..', '..', 'i18n');
|
||||||
if (!fs.existsSync(languageDirectory)) {
|
|
||||||
log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
|
|
||||||
log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
|
|
||||||
}
|
|
||||||
let sortedLanguages = sortLanguages(languages);
|
let sortedLanguages = sortLanguages(languages);
|
||||||
sortedLanguages.forEach((language) => {
|
sortedLanguages.forEach((language) => {
|
||||||
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
||||||
@@ -508,35 +515,31 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
|
|||||||
|
|
||||||
statistics[language.id] = 0;
|
statistics[language.id] = 0;
|
||||||
let localizedModules: Map<string[]> = Object.create(null);
|
let localizedModules: Map<string[]> = Object.create(null);
|
||||||
let languageFolderName = language.translationId || language.id;
|
let languageFolderName = language.folderName || language.id;
|
||||||
let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
|
let cwd = path.join(languageDirectory, languageFolderName, 'src');
|
||||||
let allMessages: I18nFormat | undefined;
|
|
||||||
if (fs.existsSync(i18nFile)) {
|
|
||||||
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
|
|
||||||
allMessages = JSON.parse(content);
|
|
||||||
}
|
|
||||||
modules.forEach((module) => {
|
modules.forEach((module) => {
|
||||||
let order = keysSection[module];
|
let order = keysSection[module];
|
||||||
let moduleMessage: { [messageKey: string]: string } | undefined;
|
let i18nFile = path.join(cwd, module) + '.i18n.json';
|
||||||
if (allMessages) {
|
let messages: Map<string> = null;
|
||||||
moduleMessage = allMessages.contents[module];
|
if (fs.existsSync(i18nFile)) {
|
||||||
}
|
let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
|
||||||
if (!moduleMessage) {
|
messages = JSON.parse(content);
|
||||||
|
} else {
|
||||||
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
||||||
log(`No localized messages found for module ${module}. Using default messages.`);
|
log(`No localized messages found for module ${module}. Using default messages.`);
|
||||||
}
|
}
|
||||||
moduleMessage = defaultMessages[module];
|
messages = defaultMessages[module];
|
||||||
statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
|
statistics[language.id] = statistics[language.id] + Object.keys(messages).length;
|
||||||
}
|
}
|
||||||
let localizedMessages: string[] = [];
|
let localizedMessages: string[] = [];
|
||||||
order.forEach((keyInfo) => {
|
order.forEach((keyInfo) => {
|
||||||
let key: string | null = null;
|
let key: string = null;
|
||||||
if (typeof keyInfo === 'string') {
|
if (typeof keyInfo === 'string') {
|
||||||
key = keyInfo;
|
key = keyInfo;
|
||||||
} else {
|
} else {
|
||||||
key = keyInfo.key;
|
key = keyInfo.key;
|
||||||
}
|
}
|
||||||
let message: string = moduleMessage![key];
|
let message: string = messages[key];
|
||||||
if (!message) {
|
if (!message) {
|
||||||
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
||||||
log(`No localized message found for key ${key} in module ${module}. Using default message.`);
|
log(`No localized message found for key ${key} in module ${module}. Using default message.`);
|
||||||
@@ -622,7 +625,7 @@ export function getResource(sourceFile: string): Resource {
|
|||||||
return { name: 'vs/base', project: editorProject };
|
return { name: 'vs/base', project: editorProject };
|
||||||
} else if (/^vs\/code/.test(sourceFile)) {
|
} else if (/^vs\/code/.test(sourceFile)) {
|
||||||
return { name: 'vs/code', project: workbenchProject };
|
return { name: 'vs/code', project: workbenchProject };
|
||||||
} else if (/^vs\/workbench\/contrib/.test(sourceFile)) {
|
} else if (/^vs\/workbench\/parts/.test(sourceFile)) {
|
||||||
resource = sourceFile.split('/', 4).join('/');
|
resource = sourceFile.split('/', 4).join('/');
|
||||||
return { name: resource, project: workbenchProject };
|
return { name: resource, project: workbenchProject };
|
||||||
} else if (/^vs\/workbench\/services/.test(sourceFile)) {
|
} else if (/^vs\/workbench\/services/.test(sourceFile)) {
|
||||||
@@ -709,7 +712,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
|
|||||||
}
|
}
|
||||||
return _xlf;
|
return _xlf;
|
||||||
}
|
}
|
||||||
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
|
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`]).pipe(through(function (file: File) {
|
||||||
if (file.isBuffer()) {
|
if (file.isBuffer()) {
|
||||||
const buffer: Buffer = file.contents as Buffer;
|
const buffer: Buffer = file.contents as Buffer;
|
||||||
const basename = path.basename(file.path);
|
const basename = path.basename(file.path);
|
||||||
@@ -821,8 +824,8 @@ export function createXlfFilesForIsl(): ThroughStream {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
|
export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
|
||||||
let tryGetPromises: Array<Promise<boolean>> = [];
|
let tryGetPromises = [];
|
||||||
let updateCreatePromises: Array<Promise<boolean>> = [];
|
let updateCreatePromises = [];
|
||||||
|
|
||||||
return through(function (this: ThroughStream, file: File) {
|
return through(function (this: ThroughStream, file: File) {
|
||||||
const project = path.dirname(file.relative);
|
const project = path.dirname(file.relative);
|
||||||
@@ -887,7 +890,7 @@ function getAllResources(project: string, apiHostname: string, username: string,
|
|||||||
|
|
||||||
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
|
export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
|
||||||
let resourcesByProject: Map<string[]> = Object.create(null);
|
let resourcesByProject: Map<string[]> = Object.create(null);
|
||||||
resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone
|
resourcesByProject[extensionsProject] = [].concat(externalExtensionsWithTranslations); // clone
|
||||||
|
|
||||||
return through(function (this: ThroughStream, file: File) {
|
return through(function (this: ThroughStream, file: File) {
|
||||||
const project = path.dirname(file.relative);
|
const project = path.dirname(file.relative);
|
||||||
@@ -904,7 +907,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
|
|||||||
|
|
||||||
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
|
const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
|
||||||
let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
|
let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
|
||||||
let extractedResources: string[] = [];
|
let extractedResources = [];
|
||||||
for (let project of [workbenchProject, editorProject]) {
|
for (let project of [workbenchProject, editorProject]) {
|
||||||
for (let resource of resourcesByProject[project]) {
|
for (let resource of resourcesByProject[project]) {
|
||||||
if (resource !== 'setup_messages') {
|
if (resource !== 'setup_messages') {
|
||||||
@@ -917,7 +920,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
|
|||||||
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
|
console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
|
||||||
}
|
}
|
||||||
|
|
||||||
let promises: Array<Promise<void>> = [];
|
let promises = [];
|
||||||
for (let project in resourcesByProject) {
|
for (let project in resourcesByProject) {
|
||||||
promises.push(
|
promises.push(
|
||||||
getAllResources(project, apiHostname, username, password).then(resources => {
|
getAllResources(project, apiHostname, username, password).then(resources => {
|
||||||
@@ -962,7 +965,7 @@ function tryGetResource(project: string, slug: string, apiHostname: string, cred
|
|||||||
}
|
}
|
||||||
|
|
||||||
function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
|
function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
|
||||||
return new Promise((_resolve, reject) => {
|
return new Promise((resolve, reject) => {
|
||||||
const data = JSON.stringify({
|
const data = JSON.stringify({
|
||||||
'content': xlfFile.contents.toString(),
|
'content': xlfFile.contents.toString(),
|
||||||
'name': slug,
|
'name': slug,
|
||||||
@@ -1053,8 +1056,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
|
|||||||
|
|
||||||
// extensions
|
// extensions
|
||||||
let extensionsToLocalize = Object.create(null);
|
let extensionsToLocalize = Object.create(null);
|
||||||
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
glob.sync('./extensions/**/*.nls.json', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
||||||
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
glob.sync('./extensions/*/node_modules/vscode-nls', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
||||||
|
|
||||||
Object.keys(extensionsToLocalize).forEach(extension => {
|
Object.keys(extensionsToLocalize).forEach(extension => {
|
||||||
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
|
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
|
||||||
@@ -1082,7 +1085,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
|
|||||||
let expectedTranslationsCount = resources.length;
|
let expectedTranslationsCount = resources.length;
|
||||||
let translationsRetrieved = 0, called = false;
|
let translationsRetrieved = 0, called = false;
|
||||||
|
|
||||||
return readable(function (_count: any, callback: any) {
|
return readable(function (count, callback) {
|
||||||
// Mark end of stream when all resources were retrieved
|
// Mark end of stream when all resources were retrieved
|
||||||
if (translationsRetrieved === expectedTranslationsCount) {
|
if (translationsRetrieved === expectedTranslationsCount) {
|
||||||
return this.emit('end');
|
return this.emit('end');
|
||||||
@@ -1092,7 +1095,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
|
|||||||
called = true;
|
called = true;
|
||||||
const stream = this;
|
const stream = this;
|
||||||
resources.map(function (resource) {
|
resources.map(function (resource) {
|
||||||
retrieveResource(language, resource, apiHostname, credentials).then((file: File | null) => {
|
retrieveResource(language, resource, apiHostname, credentials).then((file: File) => {
|
||||||
if (file) {
|
if (file) {
|
||||||
stream.emit('data', file);
|
stream.emit('data', file);
|
||||||
}
|
}
|
||||||
@@ -1104,13 +1107,13 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
|
|||||||
callback();
|
callback();
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
const limiter = new Limiter<File | null>(NUMBER_OF_CONCURRENT_DOWNLOADS);
|
const limiter = new Limiter<File>(NUMBER_OF_CONCURRENT_DOWNLOADS);
|
||||||
|
|
||||||
function retrieveResource(language: Language, resource: Resource, apiHostname: string, credentials: string): Promise<File | null> {
|
function retrieveResource(language: Language, resource: Resource, apiHostname, credentials): Promise<File> {
|
||||||
return limiter.queue(() => new Promise<File | null>((resolve, reject) => {
|
return limiter.queue(() => new Promise<File>((resolve, reject) => {
|
||||||
const slug = resource.name.replace(/\//g, '_');
|
const slug = resource.name.replace(/\//g, '_');
|
||||||
const project = resource.project;
|
const project = resource.project;
|
||||||
let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
|
let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id;
|
||||||
const options = {
|
const options = {
|
||||||
hostname: apiHostname,
|
hostname: apiHostname,
|
||||||
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
|
path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
|
||||||
@@ -1209,10 +1212,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
|
|||||||
let parsePromises: Promise<ParsedXLF[]>[] = [];
|
let parsePromises: Promise<ParsedXLF[]>[] = [];
|
||||||
let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
|
let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
|
||||||
let extensionsPacks: Map<I18nPack> = {};
|
let extensionsPacks: Map<I18nPack> = {};
|
||||||
let errors: any[] = [];
|
|
||||||
return through(function (this: ThroughStream, xlf: File) {
|
return through(function (this: ThroughStream, xlf: File) {
|
||||||
let project = path.basename(path.dirname(xlf.relative));
|
let stream = this;
|
||||||
let resource = path.basename(xlf.relative, '.xlf');
|
let project = path.dirname(xlf.path);
|
||||||
|
let resource = path.basename(xlf.path, '.xlf');
|
||||||
let contents = xlf.contents.toString();
|
let contents = xlf.contents.toString();
|
||||||
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
|
let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
|
||||||
parsePromises.push(parsePromise);
|
parsePromises.push(parsePromise);
|
||||||
@@ -1239,15 +1242,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
|
|||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
).catch(reason => {
|
);
|
||||||
errors.push(reason);
|
|
||||||
});
|
|
||||||
}, function () {
|
}, function () {
|
||||||
Promise.all(parsePromises)
|
Promise.all(parsePromises)
|
||||||
.then(() => {
|
.then(() => {
|
||||||
if (errors.length > 0) {
|
|
||||||
throw errors;
|
|
||||||
}
|
|
||||||
const translatedMainFile = createI18nFile('./main', mainPack);
|
const translatedMainFile = createI18nFile('./main', mainPack);
|
||||||
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
|
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
|
||||||
|
|
||||||
@@ -1266,9 +1264,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
|
|||||||
}
|
}
|
||||||
this.queue(null);
|
this.queue(null);
|
||||||
})
|
})
|
||||||
.catch((reason) => {
|
.catch(reason => { throw new Error(reason); });
|
||||||
this.emit('error', reason);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1289,15 +1285,11 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
|
|||||||
stream.queue(translatedFile);
|
stream.queue(translatedFile);
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
).catch(reason => {
|
);
|
||||||
this.emit('error', reason);
|
|
||||||
});
|
|
||||||
}, function () {
|
}, function () {
|
||||||
Promise.all(parsePromises)
|
Promise.all(parsePromises)
|
||||||
.then(() => { this.queue(null); })
|
.then(() => { this.queue(null); })
|
||||||
.catch(reason => {
|
.catch(reason => { throw new Error(reason); });
|
||||||
this.emit('error', reason);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -1314,7 +1306,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
|
|||||||
let firstChar = line.charAt(0);
|
let firstChar = line.charAt(0);
|
||||||
if (firstChar === '[' || firstChar === ';') {
|
if (firstChar === '[' || firstChar === ';') {
|
||||||
if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
|
if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
|
||||||
content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo!.name} messages ***`);
|
content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo.name} messages ***`);
|
||||||
} else {
|
} else {
|
||||||
content.push(line);
|
content.push(line);
|
||||||
}
|
}
|
||||||
@@ -1324,9 +1316,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
|
|||||||
let translated = line;
|
let translated = line;
|
||||||
if (key) {
|
if (key) {
|
||||||
if (key === 'LanguageName') {
|
if (key === 'LanguageName') {
|
||||||
translated = `${key}=${innoSetup.defaultInfo!.name}`;
|
translated = `${key}=${innoSetup.defaultInfo.name}`;
|
||||||
} else if (key === 'LanguageID') {
|
} else if (key === 'LanguageID') {
|
||||||
translated = `${key}=${innoSetup.defaultInfo!.id}`;
|
translated = `${key}=${innoSetup.defaultInfo.id}`;
|
||||||
} else if (key === 'LanguageCodePage') {
|
} else if (key === 'LanguageCodePage') {
|
||||||
translated = `${key}=${innoSetup.codePage.substr(2)}`;
|
translated = `${key}=${innoSetup.codePage.substr(2)}`;
|
||||||
} else {
|
} else {
|
||||||
@@ -1347,14 +1339,14 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
|
|||||||
|
|
||||||
return new File({
|
return new File({
|
||||||
path: filePath,
|
path: filePath,
|
||||||
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
|
contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8'), innoSetup.codePage)
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
function encodeEntities(value: string): string {
|
function encodeEntities(value: string): string {
|
||||||
let result: string[] = [];
|
var result: string[] = [];
|
||||||
for (let i = 0; i < value.length; i++) {
|
for (var i = 0; i < value.length; i++) {
|
||||||
let ch = value[i];
|
var ch = value[i];
|
||||||
switch (ch) {
|
switch (ch) {
|
||||||
case '<':
|
case '<':
|
||||||
result.push('<');
|
result.push('<');
|
||||||
@@ -1378,4 +1370,4 @@ function decodeEntities(value: string): string {
|
|||||||
|
|
||||||
function pseudify(message: string) {
|
function pseudify(message: string) {
|
||||||
return '\uFF3B' + message.replace(/[aouei]/g, '$&$&') + '\uFF3D';
|
return '\uFF3B' + message.replace(/[aouei]/g, '$&$&') + '\uFF3D';
|
||||||
}
|
}
|
||||||
304
build/lib/nls.js
304
build/lib/nls.js
@@ -3,12 +3,13 @@
|
|||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
const ts = require("typescript");
|
var ts = require("typescript");
|
||||||
const lazy = require("lazy.js");
|
var lazy = require("lazy.js");
|
||||||
const event_stream_1 = require("event-stream");
|
var event_stream_1 = require("event-stream");
|
||||||
const File = require("vinyl");
|
var File = require("vinyl");
|
||||||
const sm = require("source-map");
|
var sm = require("source-map");
|
||||||
const path = require("path");
|
var assign = require("object-assign");
|
||||||
|
var path = require("path");
|
||||||
var CollectStepResult;
|
var CollectStepResult;
|
||||||
(function (CollectStepResult) {
|
(function (CollectStepResult) {
|
||||||
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
|
CollectStepResult[CollectStepResult["Yes"] = 0] = "Yes";
|
||||||
@@ -17,9 +18,9 @@ var CollectStepResult;
|
|||||||
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
|
CollectStepResult[CollectStepResult["NoAndRecurse"] = 3] = "NoAndRecurse";
|
||||||
})(CollectStepResult || (CollectStepResult = {}));
|
})(CollectStepResult || (CollectStepResult = {}));
|
||||||
function collect(node, fn) {
|
function collect(node, fn) {
|
||||||
const result = [];
|
var result = [];
|
||||||
function loop(node) {
|
function loop(node) {
|
||||||
const stepResult = fn(node);
|
var stepResult = fn(node);
|
||||||
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
|
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
|
||||||
result.push(node);
|
result.push(node);
|
||||||
}
|
}
|
||||||
@@ -31,45 +32,43 @@ function collect(node, fn) {
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
function clone(object) {
|
function clone(object) {
|
||||||
const result = {};
|
var result = {};
|
||||||
for (const id in object) {
|
for (var id in object) {
|
||||||
result[id] = object[id];
|
result[id] = object[id];
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
function template(lines) {
|
function template(lines) {
|
||||||
let indent = '', wrap = '';
|
var indent = '', wrap = '';
|
||||||
if (lines.length > 1) {
|
if (lines.length > 1) {
|
||||||
indent = '\t';
|
indent = '\t';
|
||||||
wrap = '\n';
|
wrap = '\n';
|
||||||
}
|
}
|
||||||
return `/*---------------------------------------------------------
|
return "/*---------------------------------------------------------\n * Copyright (C) Microsoft Corporation. All rights reserved.\n *--------------------------------------------------------*/\ndefine([], [" + (wrap + lines.map(function (l) { return indent + l; }).join(',\n') + wrap) + "]);";
|
||||||
* Copyright (C) Microsoft Corporation. All rights reserved.
|
|
||||||
*--------------------------------------------------------*/
|
|
||||||
define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
|
|
||||||
}
|
}
|
||||||
/**
|
/**
|
||||||
* Returns a stream containing the patched JavaScript and source maps.
|
* Returns a stream containing the patched JavaScript and source maps.
|
||||||
*/
|
*/
|
||||||
function nls() {
|
function nls() {
|
||||||
const input = event_stream_1.through();
|
var input = event_stream_1.through();
|
||||||
const output = input.pipe(event_stream_1.through(function (f) {
|
var output = input.pipe(event_stream_1.through(function (f) {
|
||||||
|
var _this = this;
|
||||||
if (!f.sourceMap) {
|
if (!f.sourceMap) {
|
||||||
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
|
return this.emit('error', new Error("File " + f.relative + " does not have sourcemaps."));
|
||||||
}
|
}
|
||||||
let source = f.sourceMap.sources[0];
|
var source = f.sourceMap.sources[0];
|
||||||
if (!source) {
|
if (!source) {
|
||||||
return this.emit('error', new Error(`File ${f.relative} does not have a source in the source map.`));
|
return this.emit('error', new Error("File " + f.relative + " does not have a source in the source map."));
|
||||||
}
|
}
|
||||||
const root = f.sourceMap.sourceRoot;
|
var root = f.sourceMap.sourceRoot;
|
||||||
if (root) {
|
if (root) {
|
||||||
source = path.join(root, source);
|
source = path.join(root, source);
|
||||||
}
|
}
|
||||||
const typescript = f.sourceMap.sourcesContent[0];
|
var typescript = f.sourceMap.sourcesContent[0];
|
||||||
if (!typescript) {
|
if (!typescript) {
|
||||||
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
|
return this.emit('error', new Error("File " + f.relative + " does not have the original content in the source map."));
|
||||||
}
|
}
|
||||||
nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
|
nls.patchFiles(f, typescript).forEach(function (f) { return _this.emit('data', f); });
|
||||||
}));
|
}));
|
||||||
return event_stream_1.duplex(input, output);
|
return event_stream_1.duplex(input, output);
|
||||||
}
|
}
|
||||||
@@ -77,7 +76,8 @@ function isImportNode(node) {
|
|||||||
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
|
return node.kind === ts.SyntaxKind.ImportDeclaration || node.kind === ts.SyntaxKind.ImportEqualsDeclaration;
|
||||||
}
|
}
|
||||||
(function (nls_1) {
|
(function (nls_1) {
|
||||||
function fileFrom(file, contents, path = file.path) {
|
function fileFrom(file, contents, path) {
|
||||||
|
if (path === void 0) { path = file.path; }
|
||||||
return new File({
|
return new File({
|
||||||
contents: Buffer.from(contents),
|
contents: Buffer.from(contents),
|
||||||
base: file.base,
|
base: file.base,
|
||||||
@@ -87,27 +87,29 @@ function isImportNode(node) {
|
|||||||
}
|
}
|
||||||
nls_1.fileFrom = fileFrom;
|
nls_1.fileFrom = fileFrom;
|
||||||
function mappedPositionFrom(source, lc) {
|
function mappedPositionFrom(source, lc) {
|
||||||
return { source, line: lc.line + 1, column: lc.character };
|
return { source: source, line: lc.line + 1, column: lc.character };
|
||||||
}
|
}
|
||||||
nls_1.mappedPositionFrom = mappedPositionFrom;
|
nls_1.mappedPositionFrom = mappedPositionFrom;
|
||||||
function lcFrom(position) {
|
function lcFrom(position) {
|
||||||
return { line: position.line - 1, character: position.column };
|
return { line: position.line - 1, character: position.column };
|
||||||
}
|
}
|
||||||
nls_1.lcFrom = lcFrom;
|
nls_1.lcFrom = lcFrom;
|
||||||
class SingleFileServiceHost {
|
var SingleFileServiceHost = /** @class */ (function () {
|
||||||
constructor(options, filename, contents) {
|
function SingleFileServiceHost(options, filename, contents) {
|
||||||
|
var _this = this;
|
||||||
this.options = options;
|
this.options = options;
|
||||||
this.filename = filename;
|
this.filename = filename;
|
||||||
this.getCompilationSettings = () => this.options;
|
this.getCompilationSettings = function () { return _this.options; };
|
||||||
this.getScriptFileNames = () => [this.filename];
|
this.getScriptFileNames = function () { return [_this.filename]; };
|
||||||
this.getScriptVersion = () => '1';
|
this.getScriptVersion = function () { return '1'; };
|
||||||
this.getScriptSnapshot = (name) => name === this.filename ? this.file : this.lib;
|
this.getScriptSnapshot = function (name) { return name === _this.filename ? _this.file : _this.lib; };
|
||||||
this.getCurrentDirectory = () => '';
|
this.getCurrentDirectory = function () { return ''; };
|
||||||
this.getDefaultLibFileName = () => 'lib.d.ts';
|
this.getDefaultLibFileName = function () { return 'lib.d.ts'; };
|
||||||
this.file = ts.ScriptSnapshot.fromString(contents);
|
this.file = ts.ScriptSnapshot.fromString(contents);
|
||||||
this.lib = ts.ScriptSnapshot.fromString('');
|
this.lib = ts.ScriptSnapshot.fromString('');
|
||||||
}
|
}
|
||||||
}
|
return SingleFileServiceHost;
|
||||||
|
}());
|
||||||
nls_1.SingleFileServiceHost = SingleFileServiceHost;
|
nls_1.SingleFileServiceHost = SingleFileServiceHost;
|
||||||
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
|
function isCallExpressionWithinTextSpanCollectStep(textSpan, node) {
|
||||||
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
|
if (!ts.textSpanContainsTextSpan({ start: node.pos, length: node.end - node.pos }, textSpan)) {
|
||||||
@@ -115,96 +117,97 @@ function isImportNode(node) {
|
|||||||
}
|
}
|
||||||
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
|
return node.kind === ts.SyntaxKind.CallExpression ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse;
|
||||||
}
|
}
|
||||||
function analyze(contents, options = {}) {
|
function analyze(contents, options) {
|
||||||
const filename = 'file.ts';
|
if (options === void 0) { options = {}; }
|
||||||
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
|
var filename = 'file.ts';
|
||||||
const service = ts.createLanguageService(serviceHost);
|
var serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
|
||||||
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
|
var service = ts.createLanguageService(serviceHost);
|
||||||
|
var sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
|
||||||
// all imports
|
// all imports
|
||||||
const imports = lazy(collect(sourceFile, n => isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse));
|
var imports = lazy(collect(sourceFile, function (n) { return isImportNode(n) ? CollectStepResult.YesAndRecurse : CollectStepResult.NoAndRecurse; }));
|
||||||
// import nls = require('vs/nls');
|
// import nls = require('vs/nls');
|
||||||
const importEqualsDeclarations = imports
|
var importEqualsDeclarations = imports
|
||||||
.filter(n => n.kind === ts.SyntaxKind.ImportEqualsDeclaration)
|
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportEqualsDeclaration; })
|
||||||
.map(n => n)
|
.map(function (n) { return n; })
|
||||||
.filter(d => d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference)
|
.filter(function (d) { return d.moduleReference.kind === ts.SyntaxKind.ExternalModuleReference; })
|
||||||
.filter(d => d.moduleReference.expression.getText() === '\'vs/nls\'');
|
.filter(function (d) { return d.moduleReference.expression.getText() === '\'vs/nls\''; });
|
||||||
// import ... from 'vs/nls';
|
// import ... from 'vs/nls';
|
||||||
const importDeclarations = imports
|
var importDeclarations = imports
|
||||||
.filter(n => n.kind === ts.SyntaxKind.ImportDeclaration)
|
.filter(function (n) { return n.kind === ts.SyntaxKind.ImportDeclaration; })
|
||||||
.map(n => n)
|
.map(function (n) { return n; })
|
||||||
.filter(d => d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral)
|
.filter(function (d) { return d.moduleSpecifier.kind === ts.SyntaxKind.StringLiteral; })
|
||||||
.filter(d => d.moduleSpecifier.getText() === '\'vs/nls\'')
|
.filter(function (d) { return d.moduleSpecifier.getText() === '\'vs/nls\''; })
|
||||||
.filter(d => !!d.importClause && !!d.importClause.namedBindings);
|
.filter(function (d) { return !!d.importClause && !!d.importClause.namedBindings; });
|
||||||
const nlsExpressions = importEqualsDeclarations
|
var nlsExpressions = importEqualsDeclarations
|
||||||
.map(d => d.moduleReference.expression)
|
.map(function (d) { return d.moduleReference.expression; })
|
||||||
.concat(importDeclarations.map(d => d.moduleSpecifier))
|
.concat(importDeclarations.map(function (d) { return d.moduleSpecifier; }))
|
||||||
.map(d => ({
|
.map(function (d) { return ({
|
||||||
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
|
start: ts.getLineAndCharacterOfPosition(sourceFile, d.getStart()),
|
||||||
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
|
end: ts.getLineAndCharacterOfPosition(sourceFile, d.getEnd())
|
||||||
}));
|
}); });
|
||||||
// `nls.localize(...)` calls
|
// `nls.localize(...)` calls
|
||||||
const nlsLocalizeCallExpressions = importDeclarations
|
var nlsLocalizeCallExpressions = importDeclarations
|
||||||
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport))
|
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport; })
|
||||||
.map(d => d.importClause.namedBindings.name)
|
.map(function (d) { return d.importClause.namedBindings.name; })
|
||||||
.concat(importEqualsDeclarations.map(d => d.name))
|
.concat(importEqualsDeclarations.map(function (d) { return d.name; }))
|
||||||
// find read-only references to `nls`
|
// find read-only references to `nls`
|
||||||
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
|
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
|
||||||
.flatten()
|
.flatten()
|
||||||
.filter(r => !r.isWriteAccess)
|
.filter(function (r) { return !r.isWriteAccess; })
|
||||||
// find the deepest call expressions AST nodes that contain those references
|
// find the deepest call expressions AST nodes that contain those references
|
||||||
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
|
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
|
||||||
.map(a => lazy(a).last())
|
.map(function (a) { return lazy(a).last(); })
|
||||||
.filter(n => !!n)
|
.filter(function (n) { return !!n; })
|
||||||
.map(n => n)
|
.map(function (n) { return n; })
|
||||||
// only `localize` calls
|
// only `localize` calls
|
||||||
.filter(n => n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize');
|
.filter(function (n) { return n.expression.kind === ts.SyntaxKind.PropertyAccessExpression && n.expression.name.getText() === 'localize'; });
|
||||||
// `localize` named imports
|
// `localize` named imports
|
||||||
const allLocalizeImportDeclarations = importDeclarations
|
var allLocalizeImportDeclarations = importDeclarations
|
||||||
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports))
|
.filter(function (d) { return d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports; })
|
||||||
.map(d => [].concat(d.importClause.namedBindings.elements))
|
.map(function (d) { return [].concat(d.importClause.namedBindings.elements); })
|
||||||
.flatten();
|
.flatten();
|
||||||
// `localize` read-only references
|
// `localize` read-only references
|
||||||
const localizeReferences = allLocalizeImportDeclarations
|
var localizeReferences = allLocalizeImportDeclarations
|
||||||
.filter(d => d.name.getText() === 'localize')
|
.filter(function (d) { return d.name.getText() === 'localize'; })
|
||||||
.map(n => service.getReferencesAtPosition(filename, n.pos + 1))
|
.map(function (n) { return service.getReferencesAtPosition(filename, n.pos + 1); })
|
||||||
.flatten()
|
.flatten()
|
||||||
.filter(r => !r.isWriteAccess);
|
.filter(function (r) { return !r.isWriteAccess; });
|
||||||
// custom named `localize` read-only references
|
// custom named `localize` read-only references
|
||||||
const namedLocalizeReferences = allLocalizeImportDeclarations
|
var namedLocalizeReferences = allLocalizeImportDeclarations
|
||||||
.filter(d => d.propertyName && d.propertyName.getText() === 'localize')
|
.filter(function (d) { return d.propertyName && d.propertyName.getText() === 'localize'; })
|
||||||
.map(n => service.getReferencesAtPosition(filename, n.name.pos + 1))
|
.map(function (n) { return service.getReferencesAtPosition(filename, n.name.pos + 1); })
|
||||||
.flatten()
|
.flatten()
|
||||||
.filter(r => !r.isWriteAccess);
|
.filter(function (r) { return !r.isWriteAccess; });
|
||||||
// find the deepest call expressions AST nodes that contain those references
|
// find the deepest call expressions AST nodes that contain those references
|
||||||
const localizeCallExpressions = localizeReferences
|
var localizeCallExpressions = localizeReferences
|
||||||
.concat(namedLocalizeReferences)
|
.concat(namedLocalizeReferences)
|
||||||
.map(r => collect(sourceFile, n => isCallExpressionWithinTextSpanCollectStep(r.textSpan, n)))
|
.map(function (r) { return collect(sourceFile, function (n) { return isCallExpressionWithinTextSpanCollectStep(r.textSpan, n); }); })
|
||||||
.map(a => lazy(a).last())
|
.map(function (a) { return lazy(a).last(); })
|
||||||
.filter(n => !!n)
|
.filter(function (n) { return !!n; })
|
||||||
.map(n => n);
|
.map(function (n) { return n; });
|
||||||
// collect everything
|
// collect everything
|
||||||
const localizeCalls = nlsLocalizeCallExpressions
|
var localizeCalls = nlsLocalizeCallExpressions
|
||||||
.concat(localizeCallExpressions)
|
.concat(localizeCallExpressions)
|
||||||
.map(e => e.arguments)
|
.map(function (e) { return e.arguments; })
|
||||||
.filter(a => a.length > 1)
|
.filter(function (a) { return a.length > 1; })
|
||||||
.sort((a, b) => a[0].getStart() - b[0].getStart())
|
.sort(function (a, b) { return a[0].getStart() - b[0].getStart(); })
|
||||||
.map(a => ({
|
.map(function (a) { return ({
|
||||||
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
|
keySpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[0].getEnd()) },
|
||||||
key: a[0].getText(),
|
key: a[0].getText(),
|
||||||
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
|
valueSpan: { start: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getStart()), end: ts.getLineAndCharacterOfPosition(sourceFile, a[1].getEnd()) },
|
||||||
value: a[1].getText()
|
value: a[1].getText()
|
||||||
}));
|
}); });
|
||||||
return {
|
return {
|
||||||
localizeCalls: localizeCalls.toArray(),
|
localizeCalls: localizeCalls.toArray(),
|
||||||
nlsExpressions: nlsExpressions.toArray()
|
nlsExpressions: nlsExpressions.toArray()
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
nls_1.analyze = analyze;
|
nls_1.analyze = analyze;
|
||||||
class TextModel {
|
var TextModel = /** @class */ (function () {
|
||||||
constructor(contents) {
|
function TextModel(contents) {
|
||||||
const regex = /\r\n|\r|\n/g;
|
var regex = /\r\n|\r|\n/g;
|
||||||
let index = 0;
|
var index = 0;
|
||||||
let match;
|
var match;
|
||||||
this.lines = [];
|
this.lines = [];
|
||||||
this.lineEndings = [];
|
this.lineEndings = [];
|
||||||
while (match = regex.exec(contents)) {
|
while (match = regex.exec(contents)) {
|
||||||
@@ -217,80 +220,85 @@ function isImportNode(node) {
|
|||||||
this.lineEndings.push('');
|
this.lineEndings.push('');
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
get(index) {
|
TextModel.prototype.get = function (index) {
|
||||||
return this.lines[index];
|
return this.lines[index];
|
||||||
}
|
};
|
||||||
set(index, line) {
|
TextModel.prototype.set = function (index, line) {
|
||||||
this.lines[index] = line;
|
this.lines[index] = line;
|
||||||
}
|
};
|
||||||
get lineCount() {
|
Object.defineProperty(TextModel.prototype, "lineCount", {
|
||||||
return this.lines.length;
|
get: function () {
|
||||||
}
|
return this.lines.length;
|
||||||
|
},
|
||||||
|
enumerable: true,
|
||||||
|
configurable: true
|
||||||
|
});
|
||||||
/**
|
/**
|
||||||
* Applies patch(es) to the model.
|
* Applies patch(es) to the model.
|
||||||
* Multiple patches must be ordered.
|
* Multiple patches must be ordered.
|
||||||
* Does not support patches spanning multiple lines.
|
* Does not support patches spanning multiple lines.
|
||||||
*/
|
*/
|
||||||
apply(patch) {
|
TextModel.prototype.apply = function (patch) {
|
||||||
const startLineNumber = patch.span.start.line;
|
var startLineNumber = patch.span.start.line;
|
||||||
const endLineNumber = patch.span.end.line;
|
var endLineNumber = patch.span.end.line;
|
||||||
const startLine = this.lines[startLineNumber] || '';
|
var startLine = this.lines[startLineNumber] || '';
|
||||||
const endLine = this.lines[endLineNumber] || '';
|
var endLine = this.lines[endLineNumber] || '';
|
||||||
this.lines[startLineNumber] = [
|
this.lines[startLineNumber] = [
|
||||||
startLine.substring(0, patch.span.start.character),
|
startLine.substring(0, patch.span.start.character),
|
||||||
patch.content,
|
patch.content,
|
||||||
endLine.substring(patch.span.end.character)
|
endLine.substring(patch.span.end.character)
|
||||||
].join('');
|
].join('');
|
||||||
for (let i = startLineNumber + 1; i <= endLineNumber; i++) {
|
for (var i = startLineNumber + 1; i <= endLineNumber; i++) {
|
||||||
this.lines[i] = '';
|
this.lines[i] = '';
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
toString() {
|
TextModel.prototype.toString = function () {
|
||||||
return lazy(this.lines).zip(this.lineEndings)
|
return lazy(this.lines).zip(this.lineEndings)
|
||||||
.flatten().toArray().join('');
|
.flatten().toArray().join('');
|
||||||
}
|
};
|
||||||
}
|
return TextModel;
|
||||||
|
}());
|
||||||
nls_1.TextModel = TextModel;
|
nls_1.TextModel = TextModel;
|
||||||
function patchJavascript(patches, contents, moduleId) {
|
function patchJavascript(patches, contents, moduleId) {
|
||||||
const model = new nls.TextModel(contents);
|
var model = new nls.TextModel(contents);
|
||||||
// patch the localize calls
|
// patch the localize calls
|
||||||
lazy(patches).reverse().each(p => model.apply(p));
|
lazy(patches).reverse().each(function (p) { return model.apply(p); });
|
||||||
// patch the 'vs/nls' imports
|
// patch the 'vs/nls' imports
|
||||||
const firstLine = model.get(0);
|
var firstLine = model.get(0);
|
||||||
const patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
|
var patchedFirstLine = firstLine.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
|
||||||
model.set(0, patchedFirstLine);
|
model.set(0, patchedFirstLine);
|
||||||
return model.toString();
|
return model.toString();
|
||||||
}
|
}
|
||||||
nls_1.patchJavascript = patchJavascript;
|
nls_1.patchJavascript = patchJavascript;
|
||||||
function patchSourcemap(patches, rsm, smc) {
|
function patchSourcemap(patches, rsm, smc) {
|
||||||
const smg = new sm.SourceMapGenerator({
|
var smg = new sm.SourceMapGenerator({
|
||||||
file: rsm.file,
|
file: rsm.file,
|
||||||
sourceRoot: rsm.sourceRoot
|
sourceRoot: rsm.sourceRoot
|
||||||
});
|
});
|
||||||
patches = patches.reverse();
|
patches = patches.reverse();
|
||||||
let currentLine = -1;
|
var currentLine = -1;
|
||||||
let currentLineDiff = 0;
|
var currentLineDiff = 0;
|
||||||
let source = null;
|
var source = null;
|
||||||
smc.eachMapping(m => {
|
smc.eachMapping(function (m) {
|
||||||
const patch = patches[patches.length - 1];
|
var patch = patches[patches.length - 1];
|
||||||
const original = { line: m.originalLine, column: m.originalColumn };
|
var original = { line: m.originalLine, column: m.originalColumn };
|
||||||
const generated = { line: m.generatedLine, column: m.generatedColumn };
|
var generated = { line: m.generatedLine, column: m.generatedColumn };
|
||||||
if (currentLine !== generated.line) {
|
if (currentLine !== generated.line) {
|
||||||
currentLineDiff = 0;
|
currentLineDiff = 0;
|
||||||
}
|
}
|
||||||
currentLine = generated.line;
|
currentLine = generated.line;
|
||||||
generated.column += currentLineDiff;
|
generated.column += currentLineDiff;
|
||||||
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
|
if (patch && m.generatedLine - 1 === patch.span.end.line && m.generatedColumn === patch.span.end.character) {
|
||||||
const originalLength = patch.span.end.character - patch.span.start.character;
|
var originalLength = patch.span.end.character - patch.span.start.character;
|
||||||
const modifiedLength = patch.content.length;
|
var modifiedLength = patch.content.length;
|
||||||
const lengthDiff = modifiedLength - originalLength;
|
var lengthDiff = modifiedLength - originalLength;
|
||||||
currentLineDiff += lengthDiff;
|
currentLineDiff += lengthDiff;
|
||||||
generated.column += lengthDiff;
|
generated.column += lengthDiff;
|
||||||
patches.pop();
|
patches.pop();
|
||||||
}
|
}
|
||||||
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
|
source = rsm.sourceRoot ? path.relative(rsm.sourceRoot, m.source) : m.source;
|
||||||
source = source.replace(/\\/g, '/');
|
source = source.replace(/\\/g, '/');
|
||||||
smg.addMapping({ source, name: m.name, original, generated });
|
smg.addMapping({ source: source, name: m.name, original: original, generated: generated });
|
||||||
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
|
}, null, sm.SourceMapConsumer.GENERATED_ORDER);
|
||||||
if (source) {
|
if (source) {
|
||||||
smg.setSourceContent(source, smc.sourceContentFor(source));
|
smg.setSourceContent(source, smc.sourceContentFor(source));
|
||||||
@@ -299,47 +307,47 @@ function isImportNode(node) {
|
|||||||
}
|
}
|
||||||
nls_1.patchSourcemap = patchSourcemap;
|
nls_1.patchSourcemap = patchSourcemap;
|
||||||
function patch(moduleId, typescript, javascript, sourcemap) {
|
function patch(moduleId, typescript, javascript, sourcemap) {
|
||||||
const { localizeCalls, nlsExpressions } = analyze(typescript);
|
var _a = analyze(typescript), localizeCalls = _a.localizeCalls, nlsExpressions = _a.nlsExpressions;
|
||||||
if (localizeCalls.length === 0) {
|
if (localizeCalls.length === 0) {
|
||||||
return { javascript, sourcemap };
|
return { javascript: javascript, sourcemap: sourcemap };
|
||||||
}
|
}
|
||||||
const nlsKeys = template(localizeCalls.map(lc => lc.key));
|
var nlsKeys = template(localizeCalls.map(function (lc) { return lc.key; }));
|
||||||
const nls = template(localizeCalls.map(lc => lc.value));
|
var nls = template(localizeCalls.map(function (lc) { return lc.value; }));
|
||||||
const smc = new sm.SourceMapConsumer(sourcemap);
|
var smc = new sm.SourceMapConsumer(sourcemap);
|
||||||
const positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
|
var positionFrom = mappedPositionFrom.bind(null, sourcemap.sources[0]);
|
||||||
let i = 0;
|
var i = 0;
|
||||||
// build patches
|
// build patches
|
||||||
const patches = lazy(localizeCalls)
|
var patches = lazy(localizeCalls)
|
||||||
.map(lc => ([
|
.map(function (lc) { return ([
|
||||||
{ range: lc.keySpan, content: '' + (i++) },
|
{ range: lc.keySpan, content: '' + (i++) },
|
||||||
{ range: lc.valueSpan, content: 'null' }
|
{ range: lc.valueSpan, content: 'null' }
|
||||||
]))
|
]); })
|
||||||
.flatten()
|
.flatten()
|
||||||
.map(c => {
|
.map(function (c) {
|
||||||
const start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
|
var start = lcFrom(smc.generatedPositionFor(positionFrom(c.range.start)));
|
||||||
const end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
|
var end = lcFrom(smc.generatedPositionFor(positionFrom(c.range.end)));
|
||||||
return { span: { start, end }, content: c.content };
|
return { span: { start: start, end: end }, content: c.content };
|
||||||
})
|
})
|
||||||
.toArray();
|
.toArray();
|
||||||
javascript = patchJavascript(patches, javascript, moduleId);
|
javascript = patchJavascript(patches, javascript, moduleId);
|
||||||
// since imports are not within the sourcemap information,
|
// since imports are not within the sourcemap information,
|
||||||
// we must do this MacGyver style
|
// we must do this MacGyver style
|
||||||
if (nlsExpressions.length) {
|
if (nlsExpressions.length) {
|
||||||
javascript = javascript.replace(/^define\(.*$/m, line => {
|
javascript = javascript.replace(/^define\(.*$/m, function (line) {
|
||||||
return line.replace(/(['"])vs\/nls\1/g, `$1vs/nls!${moduleId}$1`);
|
return line.replace(/(['"])vs\/nls\1/g, "$1vs/nls!" + moduleId + "$1");
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
sourcemap = patchSourcemap(patches, sourcemap, smc);
|
sourcemap = patchSourcemap(patches, sourcemap, smc);
|
||||||
return { javascript, sourcemap, nlsKeys, nls };
|
return { javascript: javascript, sourcemap: sourcemap, nlsKeys: nlsKeys, nls: nls };
|
||||||
}
|
}
|
||||||
nls_1.patch = patch;
|
nls_1.patch = patch;
|
||||||
function patchFiles(javascriptFile, typescript) {
|
function patchFiles(javascriptFile, typescript) {
|
||||||
// hack?
|
// hack?
|
||||||
const moduleId = javascriptFile.relative
|
var moduleId = javascriptFile.relative
|
||||||
.replace(/\.js$/, '')
|
.replace(/\.js$/, '')
|
||||||
.replace(/\\/g, '/');
|
.replace(/\\/g, '/');
|
||||||
const { javascript, sourcemap, nlsKeys, nls } = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap);
|
var _a = patch(moduleId, typescript, javascriptFile.contents.toString(), javascriptFile.sourceMap), javascript = _a.javascript, sourcemap = _a.sourcemap, nlsKeys = _a.nlsKeys, nls = _a.nls;
|
||||||
const result = [fileFrom(javascriptFile, javascript)];
|
var result = [fileFrom(javascriptFile, javascript)];
|
||||||
result[0].sourceMap = sourcemap;
|
result[0].sourceMap = sourcemap;
|
||||||
if (nlsKeys) {
|
if (nlsKeys) {
|
||||||
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
|
result.push(fileFrom(javascriptFile, nlsKeys, javascriptFile.path.replace(/\.js$/, '.nls.keys.js')));
|
||||||
|
|||||||
@@ -6,9 +6,10 @@
|
|||||||
import * as ts from 'typescript';
|
import * as ts from 'typescript';
|
||||||
import * as lazy from 'lazy.js';
|
import * as lazy from 'lazy.js';
|
||||||
import { duplex, through } from 'event-stream';
|
import { duplex, through } from 'event-stream';
|
||||||
import * as File from 'vinyl';
|
import File = require('vinyl');
|
||||||
import * as sm from 'source-map';
|
import * as sm from 'source-map';
|
||||||
import * as path from 'path';
|
import assign = require('object-assign');
|
||||||
|
import path = require('path');
|
||||||
|
|
||||||
declare class FileSourceMap extends File {
|
declare class FileSourceMap extends File {
|
||||||
public sourceMap: sm.RawSourceMap;
|
public sourceMap: sm.RawSourceMap;
|
||||||
@@ -25,7 +26,7 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
|
|||||||
const result: ts.Node[] = [];
|
const result: ts.Node[] = [];
|
||||||
|
|
||||||
function loop(node: ts.Node) {
|
function loop(node: ts.Node) {
|
||||||
const stepResult = fn(node);
|
var stepResult = fn(node);
|
||||||
|
|
||||||
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
|
if (stepResult === CollectStepResult.Yes || stepResult === CollectStepResult.YesAndRecurse) {
|
||||||
result.push(node);
|
result.push(node);
|
||||||
@@ -41,8 +42,8 @@ function collect(node: ts.Node, fn: (node: ts.Node) => CollectStepResult): ts.No
|
|||||||
}
|
}
|
||||||
|
|
||||||
function clone<T>(object: T): T {
|
function clone<T>(object: T): T {
|
||||||
const result = <T>{};
|
var result = <T>{};
|
||||||
for (const id in object) {
|
for (var id in object) {
|
||||||
result[id] = object[id];
|
result[id] = object[id];
|
||||||
}
|
}
|
||||||
return result;
|
return result;
|
||||||
@@ -66,8 +67,8 @@ define([], [${ wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
|
|||||||
* Returns a stream containing the patched JavaScript and source maps.
|
* Returns a stream containing the patched JavaScript and source maps.
|
||||||
*/
|
*/
|
||||||
function nls(): NodeJS.ReadWriteStream {
|
function nls(): NodeJS.ReadWriteStream {
|
||||||
const input = through();
|
var input = through();
|
||||||
const output = input.pipe(through(function (f: FileSourceMap) {
|
var output = input.pipe(through(function (f: FileSourceMap) {
|
||||||
if (!f.sourceMap) {
|
if (!f.sourceMap) {
|
||||||
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
|
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
|
||||||
}
|
}
|
||||||
@@ -82,7 +83,7 @@ function nls(): NodeJS.ReadWriteStream {
|
|||||||
source = path.join(root, source);
|
source = path.join(root, source);
|
||||||
}
|
}
|
||||||
|
|
||||||
const typescript = f.sourceMap.sourcesContent![0];
|
const typescript = f.sourceMap.sourcesContent[0];
|
||||||
if (!typescript) {
|
if (!typescript) {
|
||||||
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
|
return this.emit('error', new Error(`File ${f.relative} does not have the original content in the source map.`));
|
||||||
}
|
}
|
||||||
@@ -173,7 +174,7 @@ module nls {
|
|||||||
|
|
||||||
export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
|
export function analyze(contents: string, options: ts.CompilerOptions = {}): ILocalizeAnalysisResult {
|
||||||
const filename = 'file.ts';
|
const filename = 'file.ts';
|
||||||
const serviceHost = new SingleFileServiceHost(Object.assign(clone(options), { noResolve: true }), filename, contents);
|
const serviceHost = new SingleFileServiceHost(assign(clone(options), { noResolve: true }), filename, contents);
|
||||||
const service = ts.createLanguageService(serviceHost);
|
const service = ts.createLanguageService(serviceHost);
|
||||||
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
|
const sourceFile = ts.createSourceFile(filename, contents, ts.ScriptTarget.ES5, true);
|
||||||
|
|
||||||
@@ -205,8 +206,8 @@ module nls {
|
|||||||
|
|
||||||
// `nls.localize(...)` calls
|
// `nls.localize(...)` calls
|
||||||
const nlsLocalizeCallExpressions = importDeclarations
|
const nlsLocalizeCallExpressions = importDeclarations
|
||||||
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport))
|
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamespaceImport)
|
||||||
.map(d => (<ts.NamespaceImport>d.importClause!.namedBindings).name)
|
.map(d => (<ts.NamespaceImport>d.importClause.namedBindings).name)
|
||||||
.concat(importEqualsDeclarations.map(d => d.name))
|
.concat(importEqualsDeclarations.map(d => d.name))
|
||||||
|
|
||||||
// find read-only references to `nls`
|
// find read-only references to `nls`
|
||||||
@@ -225,8 +226,8 @@ module nls {
|
|||||||
|
|
||||||
// `localize` named imports
|
// `localize` named imports
|
||||||
const allLocalizeImportDeclarations = importDeclarations
|
const allLocalizeImportDeclarations = importDeclarations
|
||||||
.filter(d => !!(d.importClause && d.importClause.namedBindings && d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports))
|
.filter(d => d.importClause.namedBindings.kind === ts.SyntaxKind.NamedImports)
|
||||||
.map(d => ([] as any[]).concat((<ts.NamedImports>d.importClause!.namedBindings!).elements))
|
.map(d => [].concat((<ts.NamedImports>d.importClause.namedBindings).elements))
|
||||||
.flatten();
|
.flatten();
|
||||||
|
|
||||||
// `localize` read-only references
|
// `localize` read-only references
|
||||||
@@ -278,7 +279,7 @@ module nls {
|
|||||||
constructor(contents: string) {
|
constructor(contents: string) {
|
||||||
const regex = /\r\n|\r|\n/g;
|
const regex = /\r\n|\r|\n/g;
|
||||||
let index = 0;
|
let index = 0;
|
||||||
let match: RegExpExecArray | null;
|
let match: RegExpExecArray;
|
||||||
|
|
||||||
this.lines = [];
|
this.lines = [];
|
||||||
this.lineEndings = [];
|
this.lineEndings = [];
|
||||||
@@ -359,7 +360,7 @@ module nls {
|
|||||||
patches = patches.reverse();
|
patches = patches.reverse();
|
||||||
let currentLine = -1;
|
let currentLine = -1;
|
||||||
let currentLineDiff = 0;
|
let currentLineDiff = 0;
|
||||||
let source: string | null = null;
|
let source = null;
|
||||||
|
|
||||||
smc.eachMapping(m => {
|
smc.eachMapping(m => {
|
||||||
const patch = patches[patches.length - 1];
|
const patch = patches[patches.length - 1];
|
||||||
|
|||||||
@@ -4,32 +4,29 @@
|
|||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
'use strict';
|
'use strict';
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const es = require("event-stream");
|
var path = require("path");
|
||||||
const gulp = require("gulp");
|
var gulp = require("gulp");
|
||||||
const concat = require("gulp-concat");
|
var sourcemaps = require("gulp-sourcemaps");
|
||||||
const minifyCSS = require("gulp-cssnano");
|
var filter = require("gulp-filter");
|
||||||
const filter = require("gulp-filter");
|
var minifyCSS = require("gulp-cssnano");
|
||||||
const flatmap = require("gulp-flatmap");
|
var uglify = require("gulp-uglify");
|
||||||
const sourcemaps = require("gulp-sourcemaps");
|
var composer = require("gulp-uglify/composer");
|
||||||
const uglify = require("gulp-uglify");
|
var uglifyes = require("uglify-es");
|
||||||
const composer = require("gulp-uglify/composer");
|
var es = require("event-stream");
|
||||||
const fancyLog = require("fancy-log");
|
var concat = require("gulp-concat");
|
||||||
const ansiColors = require("ansi-colors");
|
var VinylFile = require("vinyl");
|
||||||
const path = require("path");
|
var bundle = require("./bundle");
|
||||||
const pump = require("pump");
|
var util = require("./util");
|
||||||
const uglifyes = require("uglify-es");
|
var gulpUtil = require("gulp-util");
|
||||||
const VinylFile = require("vinyl");
|
var flatmap = require("gulp-flatmap");
|
||||||
const bundle = require("./bundle");
|
var pump = require("pump");
|
||||||
const i18n_1 = require("./i18n");
|
var REPO_ROOT_PATH = path.join(__dirname, '../..');
|
||||||
const stats_1 = require("./stats");
|
|
||||||
const util = require("./util");
|
|
||||||
const REPO_ROOT_PATH = path.join(__dirname, '../..');
|
|
||||||
function log(prefix, message) {
|
function log(prefix, message) {
|
||||||
fancyLog(ansiColors.cyan('[' + prefix + ']'), message);
|
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
|
||||||
}
|
}
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
function loaderConfig(emptyPaths) {
|
function loaderConfig(emptyPaths) {
|
||||||
const result = {
|
var result = {
|
||||||
paths: {
|
paths: {
|
||||||
'vs': 'out-build/vs',
|
'vs': 'out-build/vs',
|
||||||
'sql': 'out-build/sql',
|
'sql': 'out-build/sql',
|
||||||
@@ -41,26 +38,26 @@ function loaderConfig(emptyPaths) {
|
|||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
exports.loaderConfig = loaderConfig;
|
exports.loaderConfig = loaderConfig;
|
||||||
const IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
|
var IS_OUR_COPYRIGHT_REGEXP = /Copyright \(C\) Microsoft Corporation/i;
|
||||||
function loader(src, bundledFileHeader, bundleLoader) {
|
function loader(src, bundledFileHeader, bundleLoader) {
|
||||||
let sources = [
|
var sources = [
|
||||||
`${src}/vs/loader.js`
|
src + "/vs/loader.js"
|
||||||
];
|
];
|
||||||
if (bundleLoader) {
|
if (bundleLoader) {
|
||||||
sources = sources.concat([
|
sources = sources.concat([
|
||||||
`${src}/vs/css.js`,
|
src + "/vs/css.js",
|
||||||
`${src}/vs/nls.js`
|
src + "/vs/nls.js"
|
||||||
]);
|
]);
|
||||||
}
|
}
|
||||||
let isFirst = true;
|
var isFirst = true;
|
||||||
return (gulp
|
return (gulp
|
||||||
.src(sources, { base: `${src}` })
|
.src(sources, { base: "" + src })
|
||||||
.pipe(es.through(function (data) {
|
.pipe(es.through(function (data) {
|
||||||
if (isFirst) {
|
if (isFirst) {
|
||||||
isFirst = false;
|
isFirst = false;
|
||||||
this.emit('data', new VinylFile({
|
this.emit('data', new VinylFile({
|
||||||
path: 'fake',
|
path: 'fake',
|
||||||
base: undefined,
|
base: '',
|
||||||
contents: Buffer.from(bundledFileHeader)
|
contents: Buffer.from(bundledFileHeader)
|
||||||
}));
|
}));
|
||||||
this.emit('data', data);
|
this.emit('data', data);
|
||||||
@@ -77,12 +74,12 @@ function loader(src, bundledFileHeader, bundleLoader) {
|
|||||||
})));
|
})));
|
||||||
}
|
}
|
||||||
function toConcatStream(src, bundledFileHeader, sources, dest) {
|
function toConcatStream(src, bundledFileHeader, sources, dest) {
|
||||||
const useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
|
var useSourcemaps = /\.js$/.test(dest) && !/\.nls\.js$/.test(dest);
|
||||||
// If a bundle ends up including in any of the sources our copyright, then
|
// If a bundle ends up including in any of the sources our copyright, then
|
||||||
// insert a fake source at the beginning of each bundle with our copyright
|
// insert a fake source at the beginning of each bundle with our copyright
|
||||||
let containsOurCopyright = false;
|
var containsOurCopyright = false;
|
||||||
for (let i = 0, len = sources.length; i < len; i++) {
|
for (var i = 0, len = sources.length; i < len; i++) {
|
||||||
const fileContents = sources[i].contents;
|
var fileContents = sources[i].contents;
|
||||||
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
|
if (IS_OUR_COPYRIGHT_REGEXP.test(fileContents)) {
|
||||||
containsOurCopyright = true;
|
containsOurCopyright = true;
|
||||||
break;
|
break;
|
||||||
@@ -94,9 +91,9 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
|
|||||||
contents: bundledFileHeader
|
contents: bundledFileHeader
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
const treatedSources = sources.map(function (source) {
|
var treatedSources = sources.map(function (source) {
|
||||||
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
|
var root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
|
||||||
const base = source.path ? root + `/${src}` : undefined;
|
var base = source.path ? root + ("/" + src) : '';
|
||||||
return new VinylFile({
|
return new VinylFile({
|
||||||
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
|
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
|
||||||
base: base,
|
base: base,
|
||||||
@@ -105,8 +102,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest) {
|
|||||||
});
|
});
|
||||||
return es.readArray(treatedSources)
|
return es.readArray(treatedSources)
|
||||||
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
||||||
.pipe(concat(dest))
|
.pipe(concat(dest));
|
||||||
.pipe(stats_1.createStatsStream(dest));
|
|
||||||
}
|
}
|
||||||
function toBundleStream(src, bundledFileHeader, bundles) {
|
function toBundleStream(src, bundledFileHeader, bundles) {
|
||||||
return es.merge(bundles.map(function (bundle) {
|
return es.merge(bundles.map(function (bundle) {
|
||||||
@@ -114,32 +110,33 @@ function toBundleStream(src, bundledFileHeader, bundles) {
|
|||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
function optimizeTask(opts) {
|
function optimizeTask(opts) {
|
||||||
const src = opts.src;
|
var src = opts.src;
|
||||||
const entryPoints = opts.entryPoints;
|
var entryPoints = opts.entryPoints;
|
||||||
const resources = opts.resources;
|
var otherSources = opts.otherSources;
|
||||||
const loaderConfig = opts.loaderConfig;
|
var resources = opts.resources;
|
||||||
const bundledFileHeader = opts.header;
|
var loaderConfig = opts.loaderConfig;
|
||||||
const bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
|
var bundledFileHeader = opts.header;
|
||||||
const out = opts.out;
|
var bundleLoader = (typeof opts.bundleLoader === 'undefined' ? true : opts.bundleLoader);
|
||||||
|
var out = opts.out;
|
||||||
return function () {
|
return function () {
|
||||||
const bundlesStream = es.through(); // this stream will contain the bundled files
|
var bundlesStream = es.through(); // this stream will contain the bundled files
|
||||||
const resourcesStream = es.through(); // this stream will contain the resources
|
var resourcesStream = es.through(); // this stream will contain the resources
|
||||||
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
|
var bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
|
||||||
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
|
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
|
||||||
if (err || !result) {
|
if (err) {
|
||||||
return bundlesStream.emit('error', JSON.stringify(err));
|
return bundlesStream.emit('error', JSON.stringify(err));
|
||||||
}
|
}
|
||||||
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
||||||
// Remove css inlined resources
|
// Remove css inlined resources
|
||||||
const filteredResources = resources.slice();
|
var filteredResources = resources.slice();
|
||||||
result.cssInlinedResources.forEach(function (resource) {
|
result.cssInlinedResources.forEach(function (resource) {
|
||||||
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
if (process.env['VSCODE_BUILD_VERBOSE']) {
|
||||||
log('optimizer', 'excluding inlined: ' + resource);
|
log('optimizer', 'excluding inlined: ' + resource);
|
||||||
}
|
}
|
||||||
filteredResources.push('!' + resource);
|
filteredResources.push('!' + resource);
|
||||||
});
|
});
|
||||||
gulp.src(filteredResources, { base: `${src}`, allowEmpty: true }).pipe(resourcesStream);
|
gulp.src(filteredResources, { base: "" + src }).pipe(resourcesStream);
|
||||||
const bundleInfoArray = [];
|
var bundleInfoArray = [];
|
||||||
if (opts.bundleInfo) {
|
if (opts.bundleInfo) {
|
||||||
bundleInfoArray.push(new VinylFile({
|
bundleInfoArray.push(new VinylFile({
|
||||||
path: 'bundleInfo.json',
|
path: 'bundleInfo.json',
|
||||||
@@ -149,17 +146,26 @@ function optimizeTask(opts) {
|
|||||||
}
|
}
|
||||||
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
|
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
|
||||||
});
|
});
|
||||||
const result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, resourcesStream, bundleInfoStream);
|
var otherSourcesStream = es.through();
|
||||||
|
var otherSourcesStreamArr = [];
|
||||||
|
gulp.src(otherSources, { base: "" + src })
|
||||||
|
.pipe(es.through(function (data) {
|
||||||
|
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
|
||||||
|
}, function () {
|
||||||
|
if (!otherSourcesStreamArr.length) {
|
||||||
|
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
var result = es.merge(loader(src, bundledFileHeader, bundleLoader), bundlesStream, otherSourcesStream, resourcesStream, bundleInfoStream);
|
||||||
return result
|
return result
|
||||||
.pipe(sourcemaps.write('./', {
|
.pipe(sourcemaps.write('./', {
|
||||||
sourceRoot: undefined,
|
sourceRoot: null,
|
||||||
addComment: true,
|
addComment: true,
|
||||||
includeContent: true
|
includeContent: true
|
||||||
}))
|
}))
|
||||||
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
|
|
||||||
fileHeader: bundledFileHeader,
|
|
||||||
languages: opts.languages
|
|
||||||
}) : es.through())
|
|
||||||
.pipe(gulp.dest(out));
|
.pipe(gulp.dest(out));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -169,14 +175,14 @@ exports.optimizeTask = optimizeTask;
|
|||||||
* to have a file "context" to include our copyright only once per file.
|
* to have a file "context" to include our copyright only once per file.
|
||||||
*/
|
*/
|
||||||
function uglifyWithCopyrights() {
|
function uglifyWithCopyrights() {
|
||||||
const preserveComments = (f) => {
|
var preserveComments = function (f) {
|
||||||
return (_node, comment) => {
|
return function (node, comment) {
|
||||||
const text = comment.value;
|
var text = comment.value;
|
||||||
const type = comment.type;
|
var type = comment.type;
|
||||||
if (/@minifier_do_not_preserve/.test(text)) {
|
if (/@minifier_do_not_preserve/.test(text)) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
const isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
|
var isOurCopyright = IS_OUR_COPYRIGHT_REGEXP.test(text);
|
||||||
if (isOurCopyright) {
|
if (isOurCopyright) {
|
||||||
if (f.__hasOurCopyright) {
|
if (f.__hasOurCopyright) {
|
||||||
return false;
|
return false;
|
||||||
@@ -194,10 +200,10 @@ function uglifyWithCopyrights() {
|
|||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
const minify = composer(uglifyes);
|
var minify = composer(uglifyes);
|
||||||
const input = es.through();
|
var input = es.through();
|
||||||
const output = input
|
var output = input
|
||||||
.pipe(flatmap((stream, f) => {
|
.pipe(flatmap(function (stream, f) {
|
||||||
return stream.pipe(minify({
|
return stream.pipe(minify({
|
||||||
output: {
|
output: {
|
||||||
comments: preserveComments(f),
|
comments: preserveComments(f),
|
||||||
@@ -208,23 +214,18 @@ function uglifyWithCopyrights() {
|
|||||||
return es.duplex(input, output);
|
return es.duplex(input, output);
|
||||||
}
|
}
|
||||||
function minifyTask(src, sourceMapBaseUrl) {
|
function minifyTask(src, sourceMapBaseUrl) {
|
||||||
const sourceMappingURL = sourceMapBaseUrl ? ((f) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined;
|
var sourceMappingURL = sourceMapBaseUrl && (function (f) { return sourceMapBaseUrl + "/" + f.relative + ".map"; });
|
||||||
return cb => {
|
return function (cb) {
|
||||||
const jsFilter = filter('**/*.js', { restore: true });
|
var jsFilter = filter('**/*.js', { restore: true });
|
||||||
const cssFilter = filter('**/*.css', { restore: true });
|
var cssFilter = filter('**/*.css', { restore: true });
|
||||||
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.mapSources((sourcePath) => {
|
pump(gulp.src([src + '/**', '!' + src + '/**/*.map']), jsFilter, sourcemaps.init({ loadMaps: true }), uglifyWithCopyrights(), jsFilter.restore, cssFilter, minifyCSS({ reduceIdents: false }), cssFilter.restore, sourcemaps.write('./', {
|
||||||
if (sourcePath === 'bootstrap-fork.js') {
|
sourceMappingURL: sourceMappingURL,
|
||||||
return 'bootstrap-fork.orig.js';
|
sourceRoot: null,
|
||||||
}
|
|
||||||
return sourcePath;
|
|
||||||
}), sourcemaps.write('./', {
|
|
||||||
sourceMappingURL,
|
|
||||||
sourceRoot: undefined,
|
|
||||||
includeContent: true,
|
includeContent: true,
|
||||||
addComment: true
|
addComment: true
|
||||||
}), gulp.dest(src + '-min'), (err) => {
|
}), gulp.dest(src + '-min'), function (err) {
|
||||||
if (err instanceof uglify.GulpUglifyError) {
|
if (err instanceof uglify.GulpUglifyError) {
|
||||||
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
|
console.error("Uglify error in '" + (err.cause && err.cause.filename) + "'");
|
||||||
}
|
}
|
||||||
cb(err);
|
cb(err);
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -5,36 +5,34 @@
|
|||||||
|
|
||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
import * as es from 'event-stream';
|
import * as path from 'path';
|
||||||
import * as gulp from 'gulp';
|
import * as gulp from 'gulp';
|
||||||
import * as concat from 'gulp-concat';
|
|
||||||
import * as minifyCSS from 'gulp-cssnano';
|
|
||||||
import * as filter from 'gulp-filter';
|
|
||||||
import * as flatmap from 'gulp-flatmap';
|
|
||||||
import * as sourcemaps from 'gulp-sourcemaps';
|
import * as sourcemaps from 'gulp-sourcemaps';
|
||||||
|
import * as filter from 'gulp-filter';
|
||||||
|
import * as minifyCSS from 'gulp-cssnano';
|
||||||
import * as uglify from 'gulp-uglify';
|
import * as uglify from 'gulp-uglify';
|
||||||
import * as composer from 'gulp-uglify/composer';
|
import * as composer from 'gulp-uglify/composer';
|
||||||
import * as fancyLog from 'fancy-log';
|
|
||||||
import * as ansiColors from 'ansi-colors';
|
|
||||||
import * as path from 'path';
|
|
||||||
import * as pump from 'pump';
|
|
||||||
import * as sm from 'source-map';
|
|
||||||
import * as uglifyes from 'uglify-es';
|
import * as uglifyes from 'uglify-es';
|
||||||
|
import * as es from 'event-stream';
|
||||||
|
import * as concat from 'gulp-concat';
|
||||||
import * as VinylFile from 'vinyl';
|
import * as VinylFile from 'vinyl';
|
||||||
import * as bundle from './bundle';
|
import * as bundle from './bundle';
|
||||||
import { Language, processNlsFiles } from './i18n';
|
|
||||||
import { createStatsStream } from './stats';
|
|
||||||
import * as util from './util';
|
import * as util from './util';
|
||||||
|
import * as gulpUtil from 'gulp-util';
|
||||||
|
import * as flatmap from 'gulp-flatmap';
|
||||||
|
import * as pump from 'pump';
|
||||||
|
import * as sm from 'source-map';
|
||||||
|
import { Language } from './i18n';
|
||||||
|
|
||||||
const REPO_ROOT_PATH = path.join(__dirname, '../..');
|
const REPO_ROOT_PATH = path.join(__dirname, '../..');
|
||||||
|
|
||||||
function log(prefix: string, message: string): void {
|
function log(prefix: string, message: string): void {
|
||||||
fancyLog(ansiColors.cyan('[' + prefix + ']'), message);
|
gulpUtil.log(gulpUtil.colors.cyan('[' + prefix + ']'), message);
|
||||||
}
|
}
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
export function loaderConfig(emptyPaths?: string[]) {
|
export function loaderConfig(emptyPaths?: string[]) {
|
||||||
const result: any = {
|
const result = {
|
||||||
paths: {
|
paths: {
|
||||||
'vs': 'out-build/vs',
|
'vs': 'out-build/vs',
|
||||||
'sql': 'out-build/sql',
|
'sql': 'out-build/sql',
|
||||||
@@ -74,7 +72,7 @@ function loader(src: string, bundledFileHeader: string, bundleLoader: boolean):
|
|||||||
isFirst = false;
|
isFirst = false;
|
||||||
this.emit('data', new VinylFile({
|
this.emit('data', new VinylFile({
|
||||||
path: 'fake',
|
path: 'fake',
|
||||||
base: undefined,
|
base: '',
|
||||||
contents: Buffer.from(bundledFileHeader)
|
contents: Buffer.from(bundledFileHeader)
|
||||||
}));
|
}));
|
||||||
this.emit('data', data);
|
this.emit('data', data);
|
||||||
@@ -114,7 +112,7 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
|
|||||||
|
|
||||||
const treatedSources = sources.map(function (source) {
|
const treatedSources = sources.map(function (source) {
|
||||||
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
|
const root = source.path ? REPO_ROOT_PATH.replace(/\\/g, '/') : '';
|
||||||
const base = source.path ? root + `/${src}` : undefined;
|
const base = source.path ? root + `/${src}` : '';
|
||||||
|
|
||||||
return new VinylFile({
|
return new VinylFile({
|
||||||
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
|
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
|
||||||
@@ -125,11 +123,10 @@ function toConcatStream(src: string, bundledFileHeader: string, sources: bundle.
|
|||||||
|
|
||||||
return es.readArray(treatedSources)
|
return es.readArray(treatedSources)
|
||||||
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
|
||||||
.pipe(concat(dest))
|
.pipe(concat(dest));
|
||||||
.pipe(createStatsStream(dest));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
function toBundleStream(src: string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
|
function toBundleStream(src:string, bundledFileHeader: string, bundles: bundle.IConcatFile[]): NodeJS.ReadWriteStream {
|
||||||
return es.merge(bundles.map(function (bundle) {
|
return es.merge(bundles.map(function (bundle) {
|
||||||
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
|
return toConcatStream(src, bundledFileHeader, bundle.sources, bundle.dest);
|
||||||
}));
|
}));
|
||||||
@@ -144,6 +141,10 @@ export interface IOptimizeTaskOpts {
|
|||||||
* (for AMD files, will get bundled and get Copyright treatment)
|
* (for AMD files, will get bundled and get Copyright treatment)
|
||||||
*/
|
*/
|
||||||
entryPoints: bundle.IEntryPoint[];
|
entryPoints: bundle.IEntryPoint[];
|
||||||
|
/**
|
||||||
|
* (for non-AMD files that should get Copyright treatment)
|
||||||
|
*/
|
||||||
|
otherSources: string[];
|
||||||
/**
|
/**
|
||||||
* (svg, etc.)
|
* (svg, etc.)
|
||||||
*/
|
*/
|
||||||
@@ -174,6 +175,7 @@ export interface IOptimizeTaskOpts {
|
|||||||
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
|
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
|
||||||
const src = opts.src;
|
const src = opts.src;
|
||||||
const entryPoints = opts.entryPoints;
|
const entryPoints = opts.entryPoints;
|
||||||
|
const otherSources = opts.otherSources;
|
||||||
const resources = opts.resources;
|
const resources = opts.resources;
|
||||||
const loaderConfig = opts.loaderConfig;
|
const loaderConfig = opts.loaderConfig;
|
||||||
const bundledFileHeader = opts.header;
|
const bundledFileHeader = opts.header;
|
||||||
@@ -186,7 +188,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
|
|||||||
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
|
const bundleInfoStream = es.through(); // this stream will contain bundleInfo.json
|
||||||
|
|
||||||
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
|
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
|
||||||
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
|
if (err) { return bundlesStream.emit('error', JSON.stringify(err)); }
|
||||||
|
|
||||||
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
||||||
|
|
||||||
@@ -198,7 +200,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
|
|||||||
}
|
}
|
||||||
filteredResources.push('!' + resource);
|
filteredResources.push('!' + resource);
|
||||||
});
|
});
|
||||||
gulp.src(filteredResources, { base: `${src}`, allowEmpty: true }).pipe(resourcesStream);
|
gulp.src(filteredResources, { base: `${src}` }).pipe(resourcesStream);
|
||||||
|
|
||||||
const bundleInfoArray: VinylFile[] = [];
|
const bundleInfoArray: VinylFile[] = [];
|
||||||
if (opts.bundleInfo) {
|
if (opts.bundleInfo) {
|
||||||
@@ -211,23 +213,34 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
|
|||||||
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
|
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const otherSourcesStream = es.through();
|
||||||
|
const otherSourcesStreamArr: NodeJS.ReadWriteStream[] = [];
|
||||||
|
|
||||||
|
gulp.src(otherSources, { base: `${src}` })
|
||||||
|
.pipe(es.through(function (data) {
|
||||||
|
otherSourcesStreamArr.push(toConcatStream(src, bundledFileHeader, [data], data.relative));
|
||||||
|
}, function () {
|
||||||
|
if (!otherSourcesStreamArr.length) {
|
||||||
|
setTimeout(function () { otherSourcesStream.emit('end'); }, 0);
|
||||||
|
} else {
|
||||||
|
es.merge(otherSourcesStreamArr).pipe(otherSourcesStream);
|
||||||
|
}
|
||||||
|
}));
|
||||||
|
|
||||||
const result = es.merge(
|
const result = es.merge(
|
||||||
loader(src, bundledFileHeader, bundleLoader),
|
loader(src, bundledFileHeader, bundleLoader),
|
||||||
bundlesStream,
|
bundlesStream,
|
||||||
|
otherSourcesStream,
|
||||||
resourcesStream,
|
resourcesStream,
|
||||||
bundleInfoStream
|
bundleInfoStream
|
||||||
);
|
);
|
||||||
|
|
||||||
return result
|
return result
|
||||||
.pipe(sourcemaps.write('./', {
|
.pipe(sourcemaps.write('./', {
|
||||||
sourceRoot: undefined,
|
sourceRoot: null,
|
||||||
addComment: true,
|
addComment: true,
|
||||||
includeContent: true
|
includeContent: true
|
||||||
}))
|
}))
|
||||||
.pipe(opts.languages && opts.languages.length ? processNlsFiles({
|
|
||||||
fileHeader: bundledFileHeader,
|
|
||||||
languages: opts.languages
|
|
||||||
}) : es.through())
|
|
||||||
.pipe(gulp.dest(out));
|
.pipe(gulp.dest(out));
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
@@ -241,7 +254,7 @@ declare class FileWithCopyright extends VinylFile {
|
|||||||
*/
|
*/
|
||||||
function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
|
function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
|
||||||
const preserveComments = (f: FileWithCopyright) => {
|
const preserveComments = (f: FileWithCopyright) => {
|
||||||
return (_node: any, comment: { value: string; type: string; }) => {
|
return (node, comment: { value: string; type: string; }) => {
|
||||||
const text = comment.value;
|
const text = comment.value;
|
||||||
const type = comment.type;
|
const type = comment.type;
|
||||||
|
|
||||||
@@ -269,7 +282,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
const minify = (composer as any)(uglifyes);
|
const minify = composer(uglifyes);
|
||||||
const input = es.through();
|
const input = es.through();
|
||||||
const output = input
|
const output = input
|
||||||
.pipe(flatmap((stream, f) => {
|
.pipe(flatmap((stream, f) => {
|
||||||
@@ -285,7 +298,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
|
export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
|
||||||
const sourceMappingURL = sourceMapBaseUrl ? ((f: any) => `${sourceMapBaseUrl}/${f.relative}.map`) : undefined;
|
const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);
|
||||||
|
|
||||||
return cb => {
|
return cb => {
|
||||||
const jsFilter = filter('**/*.js', { restore: true });
|
const jsFilter = filter('**/*.js', { restore: true });
|
||||||
@@ -300,22 +313,15 @@ export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) =>
|
|||||||
cssFilter,
|
cssFilter,
|
||||||
minifyCSS({ reduceIdents: false }),
|
minifyCSS({ reduceIdents: false }),
|
||||||
cssFilter.restore,
|
cssFilter.restore,
|
||||||
(<any>sourcemaps).mapSources((sourcePath: string) => {
|
|
||||||
if (sourcePath === 'bootstrap-fork.js') {
|
|
||||||
return 'bootstrap-fork.orig.js';
|
|
||||||
}
|
|
||||||
|
|
||||||
return sourcePath;
|
|
||||||
}),
|
|
||||||
sourcemaps.write('./', {
|
sourcemaps.write('./', {
|
||||||
sourceMappingURL,
|
sourceMappingURL,
|
||||||
sourceRoot: undefined,
|
sourceRoot: null,
|
||||||
includeContent: true,
|
includeContent: true,
|
||||||
addComment: true
|
addComment: true
|
||||||
} as any),
|
}),
|
||||||
gulp.dest(src + '-min')
|
gulp.dest(src + '-min')
|
||||||
, (err: any) => {
|
, (err: any) => {
|
||||||
if (err instanceof (uglify as any).GulpUglifyError) {
|
if (err instanceof uglify.GulpUglifyError) {
|
||||||
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
|
console.error(`Uglify error in '${err.cause && err.cause.filename}'`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -4,21 +4,20 @@
|
|||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
'use strict';
|
'use strict';
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const es = require("event-stream");
|
var es = require("event-stream");
|
||||||
const _ = require("underscore");
|
var _ = require("underscore");
|
||||||
const fancyLog = require("fancy-log");
|
var util = require("gulp-util");
|
||||||
const ansiColors = require("ansi-colors");
|
var fs = require("fs");
|
||||||
const fs = require("fs");
|
var path = require("path");
|
||||||
const path = require("path");
|
var allErrors = [];
|
||||||
const allErrors = [];
|
var startTime = null;
|
||||||
let startTime = null;
|
var count = 0;
|
||||||
let count = 0;
|
|
||||||
function onStart() {
|
function onStart() {
|
||||||
if (count++ > 0) {
|
if (count++ > 0) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
startTime = new Date().getTime();
|
startTime = new Date().getTime();
|
||||||
fancyLog(`Starting ${ansiColors.green('compilation')}...`);
|
util.log("Starting " + util.colors.green('compilation') + "...");
|
||||||
}
|
}
|
||||||
function onEnd() {
|
function onEnd() {
|
||||||
if (--count > 0) {
|
if (--count > 0) {
|
||||||
@@ -26,7 +25,7 @@ function onEnd() {
|
|||||||
}
|
}
|
||||||
log();
|
log();
|
||||||
}
|
}
|
||||||
const buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
|
var buildLogPath = path.join(path.dirname(path.dirname(__dirname)), '.build', 'log');
|
||||||
try {
|
try {
|
||||||
fs.mkdirSync(path.dirname(buildLogPath));
|
fs.mkdirSync(path.dirname(buildLogPath));
|
||||||
}
|
}
|
||||||
@@ -34,52 +33,61 @@ catch (err) {
|
|||||||
// ignore
|
// ignore
|
||||||
}
|
}
|
||||||
function log() {
|
function log() {
|
||||||
const errors = _.flatten(allErrors);
|
var errors = _.flatten(allErrors);
|
||||||
const seen = new Set();
|
var seen = new Set();
|
||||||
errors.map(err => {
|
errors.map(function (err) {
|
||||||
if (!seen.has(err)) {
|
if (!seen.has(err)) {
|
||||||
seen.add(err);
|
seen.add(err);
|
||||||
fancyLog(`${ansiColors.red('Error')}: ${err}`);
|
util.log(util.colors.red('Error') + ": " + err);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
const regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
|
var regex = /^([^(]+)\((\d+),(\d+)\): (.*)$/;
|
||||||
const messages = errors
|
var messages = errors
|
||||||
.map(err => regex.exec(err))
|
.map(function (err) { return regex.exec(err); })
|
||||||
.filter(match => !!match)
|
.filter(function (match) { return !!match; })
|
||||||
.map(x => x)
|
.map(function (_a) {
|
||||||
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
|
var path = _a[1], line = _a[2], column = _a[3], message = _a[4];
|
||||||
|
return ({ path: path, line: parseInt(line), column: parseInt(column), message: message });
|
||||||
|
});
|
||||||
try {
|
try {
|
||||||
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
|
fs.writeFileSync(buildLogPath, JSON.stringify(messages));
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
//noop
|
//noop
|
||||||
}
|
}
|
||||||
fancyLog(`Finished ${ansiColors.green('compilation')} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - startTime) + ' ms')}`);
|
util.log("Finished " + util.colors.green('compilation') + " with " + errors.length + " errors after " + util.colors.magenta((new Date().getTime() - startTime) + ' ms'));
|
||||||
}
|
}
|
||||||
function createReporter() {
|
function createReporter() {
|
||||||
const errors = [];
|
var errors = [];
|
||||||
allErrors.push(errors);
|
allErrors.push(errors);
|
||||||
const result = (err) => errors.push(err);
|
var ReportFunc = /** @class */ (function () {
|
||||||
result.hasErrors = () => errors.length > 0;
|
function ReportFunc(err) {
|
||||||
result.end = (emitError) => {
|
errors.push(err);
|
||||||
errors.length = 0;
|
}
|
||||||
onStart();
|
ReportFunc.hasErrors = function () {
|
||||||
return es.through(undefined, function () {
|
return errors.length > 0;
|
||||||
onEnd();
|
};
|
||||||
if (emitError && errors.length > 0) {
|
ReportFunc.end = function (emitError) {
|
||||||
if (!errors.__logged__) {
|
errors.length = 0;
|
||||||
log();
|
onStart();
|
||||||
|
return es.through(null, function () {
|
||||||
|
onEnd();
|
||||||
|
if (emitError && errors.length > 0) {
|
||||||
|
errors.__logged__ = true;
|
||||||
|
if (!errors.__logged__) {
|
||||||
|
log();
|
||||||
|
}
|
||||||
|
var err = new Error("Found " + errors.length + " errors");
|
||||||
|
err.__reporter__ = true;
|
||||||
|
this.emit('error', err);
|
||||||
}
|
}
|
||||||
errors.__logged__ = true;
|
else {
|
||||||
const err = new Error(`Found ${errors.length} errors`);
|
this.emit('end');
|
||||||
err.__reporter__ = true;
|
}
|
||||||
this.emit('error', err);
|
});
|
||||||
}
|
};
|
||||||
else {
|
return ReportFunc;
|
||||||
this.emit('end');
|
}());
|
||||||
}
|
return ReportFunc;
|
||||||
});
|
|
||||||
};
|
|
||||||
return result;
|
|
||||||
}
|
}
|
||||||
exports.createReporter = createReporter;
|
exports.createReporter = createReporter;
|
||||||
|
|||||||
@@ -7,13 +7,12 @@
|
|||||||
|
|
||||||
import * as es from 'event-stream';
|
import * as es from 'event-stream';
|
||||||
import * as _ from 'underscore';
|
import * as _ from 'underscore';
|
||||||
import * as fancyLog from 'fancy-log';
|
import * as util from 'gulp-util';
|
||||||
import * as ansiColors from 'ansi-colors';
|
|
||||||
import * as fs from 'fs';
|
import * as fs from 'fs';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
|
|
||||||
const allErrors: string[][] = [];
|
const allErrors: string[][] = [];
|
||||||
let startTime: number | null = null;
|
let startTime: number = null;
|
||||||
let count = 0;
|
let count = 0;
|
||||||
|
|
||||||
function onStart(): void {
|
function onStart(): void {
|
||||||
@@ -22,7 +21,7 @@ function onStart(): void {
|
|||||||
}
|
}
|
||||||
|
|
||||||
startTime = new Date().getTime();
|
startTime = new Date().getTime();
|
||||||
fancyLog(`Starting ${ansiColors.green('compilation')}...`);
|
util.log(`Starting ${util.colors.green('compilation')}...`);
|
||||||
}
|
}
|
||||||
|
|
||||||
function onEnd(): void {
|
function onEnd(): void {
|
||||||
@@ -48,7 +47,7 @@ function log(): void {
|
|||||||
errors.map(err => {
|
errors.map(err => {
|
||||||
if (!seen.has(err)) {
|
if (!seen.has(err)) {
|
||||||
seen.add(err);
|
seen.add(err);
|
||||||
fancyLog(`${ansiColors.red('Error')}: ${err}`);
|
util.log(`${util.colors.red('Error')}: ${err}`);
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -56,7 +55,6 @@ function log(): void {
|
|||||||
const messages = errors
|
const messages = errors
|
||||||
.map(err => regex.exec(err))
|
.map(err => regex.exec(err))
|
||||||
.filter(match => !!match)
|
.filter(match => !!match)
|
||||||
.map(x => x as string[])
|
|
||||||
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
|
.map(([, path, line, column, message]) => ({ path, line: parseInt(line), column: parseInt(column), message }));
|
||||||
|
|
||||||
try {
|
try {
|
||||||
@@ -66,7 +64,7 @@ function log(): void {
|
|||||||
//noop
|
//noop
|
||||||
}
|
}
|
||||||
|
|
||||||
fancyLog(`Finished ${ansiColors.green('compilation')} with ${errors.length} errors after ${ansiColors.magenta((new Date().getTime() - startTime!) + ' ms')}`);
|
util.log(`Finished ${util.colors.green('compilation')} with ${errors.length} errors after ${util.colors.magenta((new Date().getTime() - startTime) + ' ms')}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface IReporter {
|
export interface IReporter {
|
||||||
@@ -79,32 +77,38 @@ export function createReporter(): IReporter {
|
|||||||
const errors: string[] = [];
|
const errors: string[] = [];
|
||||||
allErrors.push(errors);
|
allErrors.push(errors);
|
||||||
|
|
||||||
const result = (err: string) => errors.push(err);
|
class ReportFunc {
|
||||||
|
constructor(err: string) {
|
||||||
|
errors.push(err);
|
||||||
|
}
|
||||||
|
|
||||||
result.hasErrors = () => errors.length > 0;
|
static hasErrors(): boolean {
|
||||||
|
return errors.length > 0;
|
||||||
|
}
|
||||||
|
|
||||||
result.end = (emitError: boolean): NodeJS.ReadWriteStream => {
|
static end(emitError: boolean): NodeJS.ReadWriteStream {
|
||||||
errors.length = 0;
|
errors.length = 0;
|
||||||
onStart();
|
onStart();
|
||||||
|
|
||||||
return es.through(undefined, function () {
|
return es.through(null, function () {
|
||||||
onEnd();
|
onEnd();
|
||||||
|
|
||||||
if (emitError && errors.length > 0) {
|
if (emitError && errors.length > 0) {
|
||||||
if (!(errors as any).__logged__) {
|
(errors as any).__logged__ = true;
|
||||||
log();
|
|
||||||
|
if (!(errors as any).__logged__) {
|
||||||
|
log();
|
||||||
|
}
|
||||||
|
|
||||||
|
const err = new Error(`Found ${errors.length} errors`);
|
||||||
|
(err as any).__reporter__ = true;
|
||||||
|
this.emit('error', err);
|
||||||
|
} else {
|
||||||
|
this.emit('end');
|
||||||
}
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
(errors as any).__logged__ = true;
|
return <IReporter><any>ReportFunc;
|
||||||
|
|
||||||
const err = new Error(`Found ${errors.length} errors`);
|
|
||||||
(err as any).__reporter__ = true;
|
|
||||||
this.emit('error', err);
|
|
||||||
} else {
|
|
||||||
this.emit('end');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
};
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,51 +5,35 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
var snaps;
|
var snaps;
|
||||||
(function (snaps) {
|
(function (snaps) {
|
||||||
const fs = require('fs');
|
var fs = require('fs');
|
||||||
const path = require('path');
|
var path = require('path');
|
||||||
const os = require('os');
|
var os = require('os');
|
||||||
const cp = require('child_process');
|
var cp = require('child_process');
|
||||||
const mksnapshot = path.join(__dirname, `../../node_modules/.bin/${process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'}`);
|
var mksnapshot = path.join(__dirname, "../../node_modules/.bin/" + (process.platform === 'win32' ? 'mksnapshot.cmd' : 'mksnapshot'));
|
||||||
const product = require('../../product.json');
|
var product = require('../../product.json');
|
||||||
const arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
|
var arch = (process.argv.join('').match(/--arch=(.*)/) || [])[1];
|
||||||
//
|
//
|
||||||
let loaderFilepath;
|
var loaderFilepath;
|
||||||
let startupBlobFilepath;
|
var startupBlobFilepath;
|
||||||
switch (process.platform) {
|
switch (process.platform) {
|
||||||
case 'darwin':
|
case 'darwin':
|
||||||
loaderFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Resources/app/out/vs/loader.js`;
|
loaderFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Resources/app/out/vs/loader.js";
|
||||||
startupBlobFilepath = `VSCode-darwin/${product.nameLong}.app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin`;
|
startupBlobFilepath = "VSCode-darwin/" + product.nameLong + ".app/Contents/Frameworks/Electron Framework.framework/Resources/snapshot_blob.bin";
|
||||||
break;
|
break;
|
||||||
case 'win32':
|
case 'win32':
|
||||||
case 'linux':
|
case 'linux':
|
||||||
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
|
loaderFilepath = "VSCode-" + process.platform + "-" + arch + "/resources/app/out/vs/loader.js";
|
||||||
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
|
startupBlobFilepath = "VSCode-" + process.platform + "-" + arch + "/snapshot_blob.bin";
|
||||||
break;
|
|
||||||
default:
|
|
||||||
throw new Error('Unknown platform');
|
|
||||||
}
|
}
|
||||||
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
|
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
|
||||||
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
|
startupBlobFilepath = path.join(__dirname, '../../../', startupBlobFilepath);
|
||||||
snapshotLoader(loaderFilepath, startupBlobFilepath);
|
snapshotLoader(loaderFilepath, startupBlobFilepath);
|
||||||
function snapshotLoader(loaderFilepath, startupBlobFilepath) {
|
function snapshotLoader(loaderFilepath, startupBlobFilepath) {
|
||||||
const inputFile = fs.readFileSync(loaderFilepath);
|
var inputFile = fs.readFileSync(loaderFilepath);
|
||||||
const wrappedInputFile = `
|
var wrappedInputFile = "\n\t\tvar Monaco_Loader_Init;\n\t\t(function() {\n\t\t\tvar doNotInitLoader = true;\n\t\t\t" + inputFile.toString() + ";\n\t\t\tMonaco_Loader_Init = function() {\n\t\t\t\tAMDLoader.init();\n\t\t\t\tCSSLoaderPlugin.init();\n\t\t\t\tNLSLoaderPlugin.init();\n\n\t\t\t\treturn { define, require };\n\t\t\t}\n\t\t})();\n\t\t";
|
||||||
var Monaco_Loader_Init;
|
var wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
|
||||||
(function() {
|
|
||||||
var doNotInitLoader = true;
|
|
||||||
${inputFile.toString()};
|
|
||||||
Monaco_Loader_Init = function() {
|
|
||||||
AMDLoader.init();
|
|
||||||
CSSLoaderPlugin.init();
|
|
||||||
NLSLoaderPlugin.init();
|
|
||||||
|
|
||||||
return { define, require };
|
|
||||||
}
|
|
||||||
})();
|
|
||||||
`;
|
|
||||||
const wrappedInputFilepath = path.join(os.tmpdir(), 'wrapped-loader.js');
|
|
||||||
console.log(wrappedInputFilepath);
|
console.log(wrappedInputFilepath);
|
||||||
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
|
fs.writeFileSync(wrappedInputFilepath, wrappedInputFile);
|
||||||
cp.execFileSync(mksnapshot, [wrappedInputFilepath, `--startup_blob`, startupBlobFilepath]);
|
cp.execFileSync(mksnapshot, [wrappedInputFilepath, "--startup_blob", startupBlobFilepath]);
|
||||||
}
|
}
|
||||||
})(snaps || (snaps = {}));
|
})(snaps || (snaps = {}));
|
||||||
|
|||||||
@@ -30,10 +30,6 @@ namespace snaps {
|
|||||||
case 'linux':
|
case 'linux':
|
||||||
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
|
loaderFilepath = `VSCode-${process.platform}-${arch}/resources/app/out/vs/loader.js`;
|
||||||
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
|
startupBlobFilepath = `VSCode-${process.platform}-${arch}/snapshot_blob.bin`;
|
||||||
break;
|
|
||||||
|
|
||||||
default:
|
|
||||||
throw new Error('Unknown platform');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
|
loaderFilepath = path.join(__dirname, '../../../', loaderFilepath);
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user