Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-17 11:01:37 -05:00
Compare commits
240 Commits
@@ -11,6 +11,6 @@ trim_trailing_whitespace = true
# The indent size used in the `package.json` file cannot be changed
# https://github.com/npm/npm/pull/3180#issuecomment-16336516
[{*.yml,*.yaml,npm-shrinkwrap.json,package.json}]
[{*.yml,*.yaml,package.json}]
indent_style = space
indent_size = 2
@@ -10,5 +10,10 @@
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended"
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}
17 .gitignore vendored
@@ -1,22 +1,8 @@
.DS_Store
npm-debug.log
Thumbs.db
.DS_Store
*.dat
*.db
*.exe
*.log
*.nupkg
*.orig
*.vsix
*BROWSE.VC*
sqltoolsservice
coverage
test-reports
.vscode-test
node_modules/
.build/
.vs/
out/
out-build/
out-editor/
@@ -26,4 +12,5 @@ out-vscode/
out-vscode-min/
build/node_modules
coverage/
_site
test_data/
yarn-error.log
24 .travis.yml
@@ -7,10 +7,13 @@ os:
cache:
directories:
- $HOME/.npm
- $HOME/.cache/yarn

notifications:
email: false
webhooks:
- http://vscode-probot.westus.cloudapp.azure.com:3450/travis/notifications
- http://vscode-test-probot.westus.cloudapp.azure.com:3450/travis/notifications

addons:
apt:
@@ -31,23 +34,26 @@ before_install:
- git submodule update --init --recursive
- git clone --depth 1 https://github.com/creationix/nvm.git ./.nvm
- source ./.nvm/nvm.sh
- nvm install 7.9.0
- nvm use 7.9.0
- npm config set python `which python`
- npm install -g gulp
- nvm install 8.9.1
- nvm use 8.9.1
- npm i -g yarn
# - npm config set python `which python`
- if [ $TRAVIS_OS_NAME == "linux" ]; then
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
sh -e /etc/init.d/xvfb start;
sleep 3;
fi
# Make npm logs less verbose
# - npm config set depth 0
# - npm config set loglevel warn

install:
- ./scripts/npm.sh install
- yarn

script:
- gulp electron --silent
- gulp compile --silent --max_old_space_size=4096
- gulp optimize-vscode --silent --max_old_space_size=4096
- node_modules/.bin/gulp electron --silent
- node_modules/.bin/gulp compile --silent --max_old_space_size=4096
- node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi

after_success:
7 .vscode/launch.json vendored
@@ -1,7 +1,6 @@
{
"version": "0.1.0",
"configurations": [

{
"type": "node",
"request": "launch",
@@ -9,7 +8,7 @@
"program": "${workspaceFolder}/node_modules/gulp/bin/gulp.js",
"stopOnEntry": true,
"args": [
"watch-extension:json-client"
"hygiene"
],
"cwd": "${workspaceFolder}"
},
@@ -87,6 +86,9 @@
"runtimeArgs": [
"--inspect=5875"
],
"skipFiles": [
"**/winjs*.js"
],
"webRoot": "${workspaceFolder}"
},
{
@@ -104,6 +106,7 @@
},
"stopOnEntry": false,
"args": [
"--delay",
"--timeout",
"2000"
],
8 .vscode/settings.json vendored
@@ -10,6 +10,9 @@
"when": "$(basename).ts"
}
},
"files.associations": {
"OSSREADME.json": "jsonc"
},
"search.exclude": {
"**/node_modules": true,
"**/bower_components": true,
@@ -34,5 +37,6 @@
"command": "${workspaceFolder}\\scripts\\test.bat --coverage --run ${file}"
}
}
]
}
],
"typescript.tsdk": "node_modules/typescript/lib"
}
6 .vscode/tasks.json vendored
@@ -33,11 +33,11 @@
"task": "tslint",
"label": "Run tslint",
"problemMatcher": [
"$tslint4"
"$tslint5"
]
},
{
"taskName": "Run tests",
"label": "Run tests",
"type": "shell",
"command": "./scripts/test.sh",
"windows": {
@@ -50,7 +50,7 @@
}
},
{
"taskName": "Run Dev",
"label": "Run Dev",
"type": "shell",
"command": "./scripts/code.sh",
"windows": {
3 .yarnrc Normal file
@@ -0,0 +1,3 @@
disturl "https://atom.io/download/electron"
target "1.7.11"
runtime "electron"
78 CHANGELOG.md
@@ -1,5 +1,83 @@
# Change Log

## Version 0.27.3
* Release date: March 28, 2018
* Release status: Public Preview

## What's new in this version
The March Public Preview release enables some key aspects of the SQL Operations Studio extensibility story. Here are some highlights in this release.

* Enhance the Manage Dashboard extensibility model to support tabbed Insights and Configuration panes
* Dashboard Insights extensions for `sp_whoisactive` from [whoisactive.com](http://whoisactive.com)
* Extension Manager enables simple acquisition of 1st-party and 3rd-party extensions
* Add additional Extensibility APIs for `connection` and `objectexplorer` management
* Community Localization open for 10 languages
* Continue to fix important customer impacting GitHub issues

## Version 0.26.7
* Release date: February 16, 2018
* Release status: Public Preview Hotfix 1

## What's new in this version
* Bug fix for `#717 Selecting partial query and hitting Cmd or Ctrl+C opens terminal with Error message`

## Version 0.26.6
* Release date: February 15, 2018
* Release status: Public Preview

## What's new in this version
The February release fixes several important customer reported issues, as well as various feature improvements. We've also introduced auto-update support in February, which will simplify keeping updated with the latest changes.

Here are some of the highlights in the February release.

* Support Auto-Update installation on Windows and macOS
* Publish RPM and DEB packages to official Microsoft repos
* Fix `#6 Keep connection and selected database when opening new query tabs`
* Fix `#22 'Server Name' and 'Database Name' - Can these be drop downs instead of text boxes?`
* Fix `#481 Add "Check for updates" option`
* SQL Editor colorization and auto-completion fixes
  * `#584 Keyword "FULL" not highlighted by IntelliSense`
  * `#345 Colorize SQL functions within the editor`
  * `#300 [#tempData] latest "]" will display green color`
  * `#225 Keyword color mismatch`
  * `#60 invalid sql syntax color highlighting when using temporary table in from clause`
* Introduce Connection extensibility API
* VS Code Editor 1.19 integration
* Update JustinPealing/html-query-plan component to pick up several Query Plan viewer improvements

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`

## Version 0.25.4
* Release date: January 17, 2018
* Release status: Public Preview

## What's new in this version
The January release focuses on addressing a few of the top upvoted feature suggestions, as well as fixing high-priority bugs. This release period coincides with holiday vacations, so the churn in this release is relatively scoped.

Here are some of the highlights in the January release.

* Tab-coloring based on Server Group
* Saved Server connections are available in Connection Dialog
* Enable HotExit feature
* Fix broken Run Current Query command
* Fix drag-and-drop breaking scripting bug
* Fix incorrect pinned Start Menu icon
* Fix missing Azure Account branding icon
* Change "Server name" to "Server" in Connection Dialog

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* alextercete for `Fix "No extension gallery service configured" error (#427)`
* SebastianPfliegel for `Add cursor snippet (#475)`

## Version 0.24.1
* Release date: December 19, 2017
* Release status: Public Preview
@@ -771,5 +771,42 @@
"\"\"\""
],
"isProd": true
},
{
"name": "spdlog original",
"version": "0.14.0",
"repositoryURL": "https://github.com/gabime/spdlog",
"license": "MIT",
"isProd": true
},
{
"isLicense": true,
"name": "spdlog",
"version": "0.14.0",
"repositoryURL": "https://github.com/gabime/spdlog",
"license": "MIT",
"licenseDetail": [
"MIT License",
"",
"Copyright (c) Microsoft Corporation. All rights reserved.",
"",
"Permission is hereby granted, free of charge, to any person obtaining a copy",
"of this software and associated documentation files (the \"Software\"), to deal",
"in the Software without restriction, including without limitation the rights",
"to use, copy, modify, merge, publish, distribute, sublicense, and/or sell",
"copies of the Software, and to permit persons to whom the Software is",
"furnished to do so, subject to the following conditions:",
"",
"The above copyright notice and this permission notice shall be included in all",
"copies or substantial portions of the Software.",
"",
"THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR",
"IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,",
"FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE",
"AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER",
"LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,",
"OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE",
"SOFTWARE"
]
}
]
51 README.md
@@ -1,22 +1,34 @@
# SQL Operations Studio

[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)

SQL Operations Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.

**Download SQL Operations Studio December Public Preview**

**Download SQL Operations Studio March Public Preview**

Platform | Link
-- | --
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=865305
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=865304
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=865306
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=865307
Linux DEB | https://go.microsoft.com/fwlink/?linkid=865308
Linux RPM | https://go.microsoft.com/fwlink/?linkid=865309
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=870837
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=870838
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=870839
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=870840
Linux DEB | https://go.microsoft.com/fwlink/?linkid=870842
Linux RPM | https://go.microsoft.com/fwlink/?linkid=870841

Go to our [download page](https://aka.ms/sqlopsstudio) for more specific instructions.

Try out the latest insiders build from `master` at https://github.com/Microsoft/sqlopsstudio/releases.

See the [change log](https://github.com/Microsoft/sqlopsstudio/blob/master/CHANGELOG.md) for additional details of what's in this release.

**Design Discussions**

The SQL Operations Studio team would like to incorporate community feedback earlier in the development process. To facilitate this, we'd like to share our designs while features are actively being built.

We're currently collecting input on the **SQL Agent** experience and enhancements to the Manage Dashboard that we're calling **"Command Center"**. We'll add additional design feedback requests below as we start work in new feature areas. Please leave comments on these issues to help us understand your requirements and shape feature development.

* [#750 Seeking community feedback on SQL Agent UX prototype](https://github.com/Microsoft/sqlopsstudio/issues/750)

**Feature Highlights**

- Cross-Platform DB management for Windows, macOS and Linux with simple XCopy deployment
@@ -46,9 +58,34 @@ please see the document [How to Contribute](https://github.com/Microsoft/sqlopss

This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.

## Localization
SQL Operations Studio localization is now open for community contributions. You can contribute to localization for both software and docs. https://aka.ms/SQLOpsStudioLoc

Localization is now open for 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). Help us make SQL Operations Studio available in your language!

## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* westerncj for `Removed duplicate contribution from README.md (#753)`
* ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)`
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
* alextercete for `Fix "No extension gallery service configured" error (#427)`
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
* AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140)`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with case-sensitive collation. (#152)`
* SebastianPfliegel for `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)`
* Adam Mechanic for helping with the `whoisactive` extension
* All community localization contributors *(will get list of individuals next month)*

And of course we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/sqlopsstudio/master/ThirdPartyNotices.txt)

## License

Copyright (c) Microsoft Corporation. All rights reserved.
@@ -3,19 +3,18 @@ environment:
VSCODE_BUILD_VERBOSE: true

cache:
- '%APPDATA%\npm-cache'
- '%LOCALAPPDATA%\Yarn\cache'

install:
- ps: Install-Product node 7.9.0 x64
- npm install -g npm@4 --silent
- ps: Install-Product node 8.9.1 x64

build_script:
- .\scripts\npm.bat install
- yarn
- .\node_modules\.bin\gulp electron
- .\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit
- npm run compile

test_script:
- node --version
- npm --version
- .\scripts\test.bat
- .\scripts\test-integration.bat
12 build/builtInExtensions.json Normal file
@@ -0,0 +1,12 @@
[
{
"name": "ms-vscode.node-debug",
"version": "1.21.8",
"repo": "https://github.com/Microsoft/vscode-node-debug"
},
{
"name": "ms-vscode.node-debug2",
"version": "1.21.2",
"repo": "https://github.com/Microsoft/vscode-node-debug2"
}
]
20 build/builtin/.eslintrc Normal file
@@ -0,0 +1,20 @@
{
"env": {
"node": true,
"es6": true,
"browser": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}
126 build/builtin/browser-main.js Normal file
@@ -0,0 +1,126 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

const fs = require('fs');
const path = require('path');
const os = require('os');
// @ts-ignore review
const { remote } = require('electron');
const dialog = remote.dialog;

const builtInExtensionsPath = path.join(__dirname, '..', 'builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');

function readJson(filePath) {
return JSON.parse(fs.readFileSync(filePath, { encoding: 'utf8' }));
}

function writeJson(filePath, obj) {
fs.writeFileSync(filePath, JSON.stringify(obj, null, 2));
}

function renderOption(form, id, title, value, checked) {
const input = document.createElement('input');
input.type = 'radio';
input.id = id;
input.name = 'choice';
input.value = value;
input.checked = !!checked;
form.appendChild(input);

const label = document.createElement('label');
label.setAttribute('for', id);
label.textContent = title;
form.appendChild(label);

return input;
}

function render(el, state) {
function setState(state) {
try {
writeJson(controlFilePath, state.control);
} catch (err) {
console.error(err);
}

el.innerHTML = '';
render(el, state);
}

const ul = document.createElement('ul');
const { builtin, control } = state;

for (const ext of builtin) {
const controlState = control[ext.name] || 'marketplace';

const li = document.createElement('li');
ul.appendChild(li);

const name = document.createElement('code');
name.textContent = ext.name;
li.appendChild(name);

const form = document.createElement('form');
li.appendChild(form);

const marketplaceInput = renderOption(form, `marketplace-${ext.name}`, 'Marketplace', 'marketplace', controlState === 'marketplace');
marketplaceInput.onchange = function () {
control[ext.name] = 'marketplace';
setState({ builtin, control });
};

const disabledInput = renderOption(form, `disabled-${ext.name}`, 'Disabled', 'disabled', controlState === 'disabled');
disabledInput.onchange = function () {
control[ext.name] = 'disabled';
setState({ builtin, control });
};

let local = undefined;

if (controlState !== 'marketplace' && controlState !== 'disabled') {
local = controlState;
}

const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
localInput.onchange = function () {
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
title: 'Choose Folder',
properties: ['openDirectory']
});

if (result && result.length >= 1) {
control[ext.name] = result[0];
}

setState({ builtin, control });
};

if (local) {
const localSpan = document.createElement('code');
localSpan.className = 'local';
localSpan.textContent = local;
form.appendChild(localSpan);
}
}

el.appendChild(ul);
}

function main() {
const el = document.getElementById('extensions');
const builtin = readJson(builtInExtensionsPath);
let control;

try {
control = readJson(controlFilePath);
} catch (err) {
control = {};
}

render(el, { builtin, control });
}

window.onload = main;
46 build/builtin/index.html Normal file
@@ -0,0 +1,46 @@
<!-- Copyright (C) Microsoft Corporation. All rights reserved. -->

<!DOCTYPE html>
<html>

<head>
<meta charset="utf-8" />
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<title>Manage Built-in Extensions</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<script src="browser-main.js"></script>
<style>
body {
font-family: 'Trebuchet MS', 'Lucida Sans Unicode', 'Lucida Grande', 'Lucida Sans', Arial, sans-serif;
font-size: 10pt;
}

code {
font-family: 'Menlo', 'Courier New', 'Courier', monospace;
}

ul {
padding-left: 1em;
}

li {
list-style: none;
padding: 0.3em 0;
}

label {
margin-right: 1em;
}

form {
padding: 0.3em 0 0.3em 0.3em;
}
</style>
</head>

<body>
<h1>Built-in Extensions</h1>
<div id="extensions"></div>
</body>

</html>
20 build/builtin/main.js Normal file
@@ -0,0 +1,20 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

const { app, BrowserWindow } = require('electron');
const url = require('url');
const path = require('path');

let window = null;

app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600 });
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();
window.once('closed', () => window = null);
});

app.on('window-all-closed', () => app.quit());
5 build/builtin/package.json Normal file
@@ -0,0 +1,5 @@
{
"name": "builtin",
"version": "0.1.0",
"main": "main.js"
}
74 build/dependencies.js Normal file
@@ -0,0 +1,74 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

const path = require('path');
const parseSemver = require('parse-semver');
const cp = require('child_process');
const _ = require('underscore');

function asYarnDependency(prefix, tree) {
let parseResult;

try {
parseResult = parseSemver(tree.name);
} catch (err) {
err.message += `: ${tree.name}`;
console.warn(`Could not parse semver: ${tree.name}`);
return null;
}

// not an actual dependency in disk
if (parseResult.version !== parseResult.range) {
return null;
}

const name = parseResult.name;
const version = parseResult.version;
const dependencyPath = path.join(prefix, name);
const children = [];

for (const child of (tree.children || [])) {
const dep = asYarnDependency(path.join(prefix, name, 'node_modules'), child);

if (dep) {
children.push(dep);
}
}

return { name, version, path: dependencyPath, children };
}

function getYarnProductionDependencies(cwd) {
const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
const match = /^{"type":"tree".*$/m.exec(raw);

if (!match || match.length !== 1) {
throw new Error('Could not parse result of `yarn list --json`');
}

const trees = JSON.parse(match[0]).data.trees;

return trees
.map(tree => asYarnDependency(path.join(cwd, 'node_modules'), tree))
.filter(dep => !!dep);
}

function getProductionDependencies(cwd) {
const result = [];
const deps = getYarnProductionDependencies(cwd);
const flatten = dep => { result.push({ name: dep.name, version: dep.version, path: dep.path }); dep.children.forEach(flatten); };
deps.forEach(flatten);

return _.uniq(result);
}

module.exports.getProductionDependencies = getProductionDependencies;

if (require.main === module) {
const root = path.dirname(__dirname);
console.log(JSON.stringify(getProductionDependencies(root), null, ' '));
}
@@ -12,6 +12,7 @@ var File = require('vinyl');
var root = path.dirname(__dirname);
var sha1 = util.getVersion(root);
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var semver = require('./monaco/package.json').version;
var headerVersion = semver + '(' + sha1 + ')';

@@ -21,14 +22,14 @@ var editorEntryPoints = [
{
name: 'vs/editor/editor.main',
include: [],
exclude: [ 'vs/css', 'vs/nls' ],
prepend: [ 'out-build/vs/css.js', 'out-build/vs/nls.js' ],
exclude: ['vs/css', 'vs/nls'],
prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
},
{
name: 'vs/base/common/worker/simpleWorker',
include: [ 'vs/editor/common/services/editorSimpleWorker' ],
prepend: [ 'vs/loader.js' ],
append: [ 'vs/base/worker/workerMain' ],
include: ['vs/editor/common/services/editorSimpleWorker'],
prepend: ['vs/loader.js'],
append: ['vs/base/worker/workerMain'],
dest: 'vs/base/worker/workerMain.js'
}
];
@@ -79,14 +80,15 @@ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-client-build'],
bundleLoader: false,
header: BUNDLED_FILE_HEADER,
bundleInfo: true,
out: 'out-editor'
out: 'out-editor',
languages: undefined
}));

gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));

gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-editor'], function() {
gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-editor'], function () {
return es.merge(
// other assets
es.merge(
@@ -97,17 +99,17 @@ gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-ed

// package.json
gulp.src('build/monaco/package.json')
.pipe(es.through(function(data) {
.pipe(es.through(function (data) {
var json = JSON.parse(data.contents.toString());
json.private = false;
data.contents = new Buffer(JSON.stringify(json, null, ' '));
data.contents = Buffer.from(JSON.stringify(json, null, ' '));
this.emit('data', data);
}))
.pipe(gulp.dest('out-monaco-editor-core')),

// README.md
gulp.src('build/monaco/README-npm.md')
.pipe(es.through(function(data) {
.pipe(es.through(function (data) {
this.emit('data', new File({
path: data.path.replace(/README-npm\.md/, 'README.md'),
base: data.base,
@@ -124,10 +126,10 @@ gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-ed
// min folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function(path) {
).pipe(filterStream(function (path) {
// no map files
return !/(\.js\.map$)|(nls\.metadata\.json$)|(bundleInfo\.json$)/.test(path);
})).pipe(es.through(function(data) {
})).pipe(es.through(function (data) {
// tweak the sourceMappingURL
if (!/\.js$/.test(data.path)) {
this.emit('data', data);
@@ -140,49 +142,50 @@ gulp.task('editor-distro', ['clean-editor-distro', 'minify-editor', 'optimize-ed
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);

data.contents = new Buffer(strContents);
data.contents = Buffer.from(strContents);
this.emit('data', data);
})).pipe(gulp.dest('out-monaco-editor-core/min')),

// min-maps folder
es.merge(
gulp.src('out-editor-min/**/*')
).pipe(filterStream(function(path) {
).pipe(filterStream(function (path) {
// no map files
return /\.js\.map$/.test(path);
})).pipe(gulp.dest('out-monaco-editor-core/min-maps'))
);
});

gulp.task('analyze-editor-distro', function() {
gulp.task('analyze-editor-distro', function () {
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
var bundleInfo = require('../out-editor/bundleInfo.json');
var graph = bundleInfo.graph;
var bundles = bundleInfo.bundles;

var inverseGraph = {};
Object.keys(graph).forEach(function(module) {
Object.keys(graph).forEach(function (module) {
var dependencies = graph[module];
dependencies.forEach(function(dep) {
dependencies.forEach(function (dep) {
inverseGraph[dep] = inverseGraph[dep] || [];
inverseGraph[dep].push(module);
});
});

var detailed = {};
Object.keys(bundles).forEach(function(entryPoint) {
Object.keys(bundles).forEach(function (entryPoint) {
var included = bundles[entryPoint];
var includedMap = {};
included.forEach(function(included) {
included.forEach(function (included) {
includedMap[included] = true;
});

var explanation = [];
included.map(function(included) {
included.map(function (included) {
if (included.indexOf('!') >= 0) {
return;
}

var reason = (inverseGraph[included]||[]).filter(function(mod) {
var reason = (inverseGraph[included] || []).filter(function (mod) {
return !!includedMap[mod];
});
explanation.push({
@@ -198,7 +201,7 @@ gulp.task('analyze-editor-distro', function() {
});

function filterStream(testFunc) {
return es.through(function(data) {
return es.through(function (data) {
if (!testFunc(data.relative)) {
return;
}
@@ -20,6 +20,7 @@ const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const i18n = require('./lib/i18n');

const extensionsPath = path.join(path.dirname(__dirname), 'extensions');

@@ -29,7 +30,8 @@ const compilations = glob.sync('**/tsconfig.json', {
});

const getBaseUrl = out => `https://ticino.blob.core.windows.net/sourcemaps/${commit}/${out}`;
const languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];

const languages = i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);

const tasks = compilations.map(function (tsconfigFile) {
const absolutePath = path.join(extensionsPath, tsconfigFile);
@@ -55,13 +57,25 @@ const tasks = compilations.map(function (tsconfigFile) {
const srcBase = path.join(root, 'src');
const src = path.join(srcBase, '**');
const out = path.join(root, 'out');
const i18n = path.join(__dirname, '..', 'i18n');
const i18nPath = path.join(__dirname, '..', 'i18n');
const baseUrl = getBaseUrl(out);

let headerId, headerOut;
let index = relativeDirname.indexOf('/');
if (index < 0) {
headerId = 'vscode.' + relativeDirname;
headerOut = 'out';
} else {
headerId = 'vscode.' + relativeDirname.substr(0, index);
headerOut = relativeDirname.substr(index + 1) + '/out';
}

function createPipeline(build, emitError) {
const reporter = createReporter();

tsOptions.inlineSources = !!build;
tsOptions.base = path.dirname(absolutePath);

const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));

return function () {
@@ -80,7 +94,9 @@ const tasks = compilations.map(function (tsconfigFile) {
sourceRoot: '../src'
}))
.pipe(tsFilter.restore)
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18n, out) : es.through())
.pipe(build ? nlsDev.createAdditionalLanguageFiles(languages, i18nPath, out) : es.through())
.pipe(build ? nlsDev.bundleMetaDataFiles(headerId, headerOut) : es.through())
.pipe(build ? nlsDev.bundleLanguageFiles() : es.through())
.pipe(reporter.end(emitError));

return es.duplex(input, output);
@@ -127,7 +143,7 @@ const tasks = compilations.map(function (tsconfigFile) {
const watchInput = watcher(src, srcOpts);

return watchInput
.pipe(util.incremental(() => pipeline(true), input))
.pipe(util.incremental(() => pipeline(), input))
.pipe(gulp.dest(out));
});
@@ -12,7 +12,11 @@ const gulptslint = require('gulp-tslint');
const gulpeslint = require('gulp-eslint');
const tsfmt = require('typescript-formatter');
const tslint = require('tslint');
const VinylFile = require('vinyl');
const vfs = require('vinyl-fs');
const path = require('path');
const fs = require('fs');
const pall = require('p-all');

/**
* Hygiene works by creating cascading subsets of all our files and
@@ -29,53 +33,56 @@ const all = [
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*'
];

const eolFilter = [
'**',
'!ThirdPartyNotices.txt',
'!LICENSE.txt',
'!extensions/**/out/**',
'!**/node_modules/**',
'!**/fixtures/**',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot}',
'!build/{lib,tslintRules}/**/*.js',
'!build/monaco/**',
'!build/win32/**',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile'
'test/**/*',
'!**/node_modules/**'
];

const indentationFilter = [
'**',

// except specific files
'!ThirdPartyNotices.txt',
'!**/*.md',
'!**/*.ps1',
'!**/*.template',
'!**/*.yaml',
'!**/*.yml',
'!**/lib/**',
'!extensions/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!src/vs/*/**/*.d.ts',
'!**/*.d.ts.recipe',
'!LICENSE.txt',
'!src/vs/nls.js',
'!src/vs/css.js',
'!src/vs/loader.js',
'!src/vs/base/common/marked/raw.marked.js',
'!src/vs/base/common/winjs.base.raw.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/ps-win.ps1',
'!test/assert.js',

// except specific folders
'!test/smoke/out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',

// except multiple specific files
'!**/package.json',
'!**/npm-shrinkwrap.json',
'!**/yarn.lock',

// except multiple specific folders
'!**/octicons/**',
'!**/vs/base/common/marked/raw.marked.js',
'!**/vs/base/common/winjs.base.raw.js',
'!**/vs/base/node/terminateProcess.sh',
'!**/vs/nls.js',
'!**/vs/css.js',
'!**/vs/loader.js',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**'

// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
'!build/{lib,tslintRules}/**/*.js',
'!build/**/*.sh',
'!build/tfs/**/*.js',
'!**/Dockerfile',
'!extensions/markdown/media/*.js'
];

const copyrightFilter = [
@@ -93,6 +100,7 @@ const copyrightFilter = [
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
@@ -122,7 +130,9 @@ const tslintFilter = [
'!**/node_modules/**',
'!extensions/typescript/test/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/**/*.test.ts'
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html/server/lib/jquery.d.ts'
];

const copyrightHeader = [
@@ -132,17 +142,6 @@ const copyrightHeader = [
' *--------------------------------------------------------------------------------------------*/'
].join('\n');

function reportFailures(failures) {
failures.forEach(failure => {
const name = failure.name || failure.fileName;
const position = failure.startPosition;
const line = position.lineAndCharacter ? position.lineAndCharacter.line : position.line;
const character = position.lineAndCharacter ? position.lineAndCharacter.character : position.character;

console.error(`${name}:${line + 1}:${character + 1}:${failure.failure}`);
});
}

gulp.task('eslint', () => {
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(eslintFilter))
@@ -152,31 +151,23 @@ gulp.task('eslint', () => {
});

gulp.task('tslint', () => {
const options = { summarizeFailureOutput: true };
// {{SQL CARBON EDIT}}
const options = { emitError: false };

return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
.pipe(filter(tslintFilter))
.pipe(gulptslint({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.report(reportFailures, options));
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
.pipe(gulptslint.default.report(options));
});

const hygiene = exports.hygiene = (some, options) => {
options = options || {};
function hygiene(some) {
let errorCount = 0;

const eol = es.through(function (file) {
if (/\r\n?/g.test(file.contents.toString('utf8'))) {
console.error(file.relative + ': Bad EOL found');
errorCount++;
}

this.emit('data', file);
});

const indentation = es.through(function (file) {
file.contents
.toString('utf8')
.split(/\r\n|\r|\n/)
const lines = file.contents.toString('utf8').split(/\r\n|\r|\n/);
file.__lines = lines;

lines
.forEach((line, i) => {
if (/^\s*$/.test(line)) {
// empty or whitespace lines are OK
@@ -194,9 +185,14 @@ const hygiene = exports.hygiene = (some, options) => {
});

const copyrights = es.through(function (file) {
if (file.contents.toString('utf8').indexOf(copyrightHeader) !== 0) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
const lines = file.__lines;

for (let i = 0; i < copyrightHeaderLines.length; i++) {
if (lines[i] !== copyrightHeaderLines[i]) {
console.error(file.relative + ': Missing or bad copyright statement');
errorCount++;
break;
}
}

this.emit('data', file);
@@ -204,12 +200,20 @@ const hygiene = exports.hygiene = (some, options) => {

const formatting = es.map(function (file, cb) {
tsfmt.processString(file.path, file.contents.toString('utf8'), {
verify: true,
verify: false,
tsfmt: true,
// verbose: true
// keep checkJS happy
editorconfig: undefined,
replace: undefined,
tsconfig: undefined,
tslint: undefined
}).then(result => {
if (result.error) {
console.error(result.message);
let original = result.src.replace(/\r\n/gm, '\n');
let formatted = result.dest.replace(/\r\n/gm, '\n');

if (original !== formatted) {
console.error('File not formatted:', file.relative);
errorCount++;
}
cb(null, file);
@@ -219,32 +223,31 @@ const hygiene = exports.hygiene = (some, options) => {
});
});

const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);

const tsl = es.through(function (file) {
const configuration = tslint.Configuration.findConfiguration(null, '.');
const options = { formatter: 'json', rulesDirectory: 'build/lib/tslint' };
const contents = file.contents.toString('utf8');
const linter = new tslint.Linter(options);
linter.lint(file.relative, contents, configuration.results);
const result = linter.getResult();

if (result.failureCount > 0) {
reportFailures(result.failures);
errorCount += result.failureCount;
}

tsLinter.lint(file.relative, contents, tslintConfiguration.results);
this.emit('data', file);
});

const result = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true })
let input;

if (Array.isArray(some) || typeof some === 'string' || !some) {
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
} else {
input = some;
}

const result = input
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(filter(eolFilter))
// {{SQL CARBON EDIT}}
//.pipe(options.skipEOL ? es.through() : eol)
.pipe(filter(indentationFilter))
.pipe(indentation)
.pipe(filter(copyrightFilter))
.pipe(filter(copyrightFilter));
// {{SQL CARBON EDIT}}
//.pipe(copyrights);
// .pipe(copyrights);

const typescript = result
.pipe(filter(tslintFilter))
@@ -255,22 +258,51 @@ const hygiene = exports.hygiene = (some, options) => {
.pipe(filter(eslintFilter))
.pipe(gulpeslint('src/.eslintrc'))
.pipe(gulpeslint.formatEach('compact'));
// {{SQL CARBON EDIT}}
// {{SQL CARBON EDIT}}
// .pipe(gulpeslint.failAfterError());

let count = 0;
return es.merge(typescript, javascript)
.pipe(es.through(null, function () {
// {{SQL CARBON EDIT}}
// if (errorCount > 0) {
// this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
// } else {
// this.emit('end');
// }
this.emit('end');
.pipe(es.through(function (data) {
// {{SQL CARBON EDIT}}
this.emit('end');
}));
};
}

gulp.task('hygiene', () => hygiene(''));
function createGitIndexVinyls(paths) {
const cp = require('child_process');
const repositoryPath = process.cwd();

const fns = paths.map(relativePath => () => new Promise((c, e) => {
const fullPath = path.join(repositoryPath, relativePath);

fs.stat(fullPath, (err, stat) => {
if (err && err.code === 'ENOENT') { // ignore deletions
return c(null);
} else if (err) {
return e(err);
}

cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
if (err) {
return e(err);
}

c(new VinylFile({
path: fullPath,
base: repositoryPath,
contents: out,
stat
}));
});
});
}));

return pall(fns, { concurrency: 4 })
.then(r => r.filter(p => !!p));
}

gulp.task('hygiene', () => hygiene());

// this allows us to run hygiene as a git pre-commit hook
if (require.main === module) {
@@ -281,33 +313,38 @@ if (require.main === module) {
process.exit(1);
});

cp.exec('git config core.autocrlf', (err, out) => {
const skipEOL = out.trim() === 'true';

if (process.argv.length > 2) {
return hygiene(process.argv.slice(2), { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
}

if (process.argv.length > 2) {
hygiene(process.argv.slice(2)).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
} else {
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
if (err) {
console.error();
console.error(err);
process.exit(1);
return;
}

const some = out
.split(/\r?\n/)
.filter(l => !!l);

hygiene(some, { skipEOL: skipEOL }).on('error', err => {
console.error();
console.error(err);
process.exit(1);
});
if (some.length > 0) {
console.log('Reading git index versions...');

createGitIndexVinyls(some)
.then(vinyls => new Promise((c, e) => hygiene(es.readArray(vinyls))
.on('end', () => c())
.on('error', e)))
.catch(err => {
console.error();
console.error(err);
process.exit(1);
});
}
});
});
}
}
@@ -14,13 +14,18 @@ const util = require('./lib/util');
const remote = require('gulp-remote-src');
const zip = require('gulp-vinyl-zip');
const assign = require('object-assign');

// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');

// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json');

gulp.task('mixin', function () {
const repo = process.env['VSCODE_MIXIN_REPO'];

if (!repo) {
console.log('Missing VSCODE_MIXIN_REPO, skipping mixin');
// {{SQL CARBON EDIT}}
const updateUrl = process.env['SQLOPS_UPDATEURL'];
if (!updateUrl) {
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
return;
}

@@ -31,39 +36,20 @@ gulp.task('mixin', function () {
return;
}

const url = `https://github.com/${repo}/archive/${pkg.distro}.zip`;
const opts = { base: url };
const username = process.env['VSCODE_MIXIN_USERNAME'];
const password = process.env['VSCODE_MIXIN_PASSWORD'];

if (username || password) {
opts.auth = { user: username || '', pass: password || '' };
// {{SQL CARBON EDIT}}
let serviceUrl = 'https://raw.githubusercontent.com/Microsoft/sqlopsstudio/release/extensions/extensionsGallery.json';
if (quality === 'insider') {
serviceUrl = `https://raw.githubusercontent.com/Microsoft/sqlopsstudio/release/extensions/extensionsGallery-${quality}.json`;
}
let newValues = {
"updateUrl": updateUrl,
"quality": quality,
"extensionsGallery": {
"serviceUrl": serviceUrl
}
};

console.log('Mixing in sources from \'' + url + '\':');

let all = remote('', opts)
.pipe(zip.src())
.pipe(filter(function (f) { return !f.isDirectory(); }))
.pipe(util.rebase(1));

if (quality) {
const productJsonFilter = filter('product.json', { restore: true });
const mixin = all
.pipe(filter(['quality/' + quality + '/**']))
.pipe(util.rebase(2))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => assign({}, require('../product.json'), o)))
.pipe(productJsonFilter.restore);

all = es.merge(mixin);
}

return all
.pipe(es.mapSync(function (f) {
console.log(f.relative);
return f;
}))
return gulp.src('./product.json')
.pipe(jeditor(newValues))
.pipe(gulp.dest('.'));
});
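After this change the mixin task no longer downloads a quality archive; it patches product.json in place with gulp-json-editor. An illustrative fragment of what the merged product.json would contain for an insider build (the updateUrl value here is an example, not the shipped configuration):

{
	"updateUrl": "https://example.com/sqlops-update",
	"quality": "insider",
	"extensionsGallery": {
		"serviceUrl": "https://raw.githubusercontent.com/Microsoft/sqlopsstudio/release/extensions/extensionsGallery-insider.json"
	}
}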
@@ -13,18 +13,6 @@ const filter = require('gulp-filter');

gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
gulp.task('clean-client', util.rimraf('dataprotocol-node/client/node_modules'));
gulp.task('clean-jsonrpc', util.rimraf('dataprotocol-node/jsonrpc/node_modules'));
gulp.task('clean-server', util.rimraf('dataprotocol-node/server/node_modules'));
gulp.task('clean-types', util.rimraf('dataprotocol-node/types/node_modules'));
gulp.task('clean-extensions-modules', util.rimraf('extensions-modules/node_modules'));
gulp.task('clean-protocol', ['clean-extensions-modules', 'clean-mssql-extension', 'clean-credentials-extension', 'clean-client', 'clean-jsonrpc', 'clean-server', 'clean-types']);

// Tasks to clean extensions modules
gulp.task('clean-mssql-ext-mod', util.rimraf('extensions/mssql/node_modules/extensions-modules'));
gulp.task('clean-credentials-ext-mod', util.rimraf('extensions/credentials/node_modules/extensions-modules'));
gulp.task('clean-build-ext-mod', util.rimraf('build/node_modules/extensions-modules'));
gulp.task('clean-ext-mod', ['clean-mssql-ext-mod', 'clean-credentials-ext-mod', 'clean-build-ext-mod', 'clean-extensions-modules']);

gulp.task('fmt', () => formatStagedFiles());
const formatFiles = (some) => {
@@ -27,20 +27,26 @@ const common = require('./lib/optimize');
|
||||
const nlsDev = require('vscode-nls-dev');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const packageJson = require('../package.json');
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const product = require('../product.json');
|
||||
const shrinkwrap = require('../npm-shrinkwrap.json');
|
||||
const crypto = require('crypto');
|
||||
const i18n = require('./lib/i18n');
|
||||
var del = require('del');
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const serviceInstaller = require('extensions-modules/lib/languageservice/serviceInstallerUtil');
|
||||
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
|
||||
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
|
||||
const glob = require('glob');
|
||||
const deps = require('./dependencies');
|
||||
const getElectronVersion = require('./lib/electron').getElectronVersion;
|
||||
const createAsar = require('./lib/asar').createAsar;
|
||||
|
||||
const productDependencies = Object.keys(product.dependencies || {});
|
||||
const dependencies = Object.keys(shrinkwrap.dependencies)
|
||||
.concat(productDependencies); // additional dependencies from our product configuration
|
||||
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
|
||||
// @ts-ignore
|
||||
// {{SQL CARBON EDIT}}
|
||||
var del = require('del');
|
||||
const extensionsRoot = path.join(root, 'extensions');
|
||||
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
|
||||
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
|
||||
// {{SQL CARBON EDIT}}
|
||||
const nodeModules = [
|
||||
@@ -50,19 +56,28 @@ const nodeModules = [
'rxjs/Subject',
'rxjs/Observer',
'ng2-charts/ng2-charts']
.concat(dependencies)
.concat(Object.keys(product.dependencies || {}))
.concat(_.uniq(productionDependencies.map(d => d.name)))
.concat(baseModules);

// Build

const builtInExtensions = [
{ name: 'ms-vscode.node-debug', version: '1.18.3' },
{ name: 'ms-vscode.node-debug2', version: '1.18.5' }
];
// Build
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const builtInExtensions = require('./builtInExtensions.json');

const excludedExtensions = [
'vscode-api-tests',
'vscode-colorize-tests'
'vscode-colorize-tests',
'ms-vscode.node-debug',
'ms-vscode.node-debug2',
];

// {{SQL CARBON EDIT}}
const vsce = require('vsce');
const sqlBuiltInExtensions = [
// Add SQL built-in extensions here.
// the extension will be excluded from SQLOps package and will have separate vsix packages
'agent'
];

const vscodeEntryPoints = _.flatten([
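The hard-coded list above is replaced by a require of build/builtInExtensions.json. Based on the entries that were previously inlined, that JSON file presumably holds an array of name/version records along the following lines (any additional fields the real file carries are not shown here):

[
	{ "name": "ms-vscode.node-debug", "version": "1.18.3" },
	{ "name": "ms-vscode.node-debug2", "version": "1.18.5" }
]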
@@ -79,8 +94,8 @@ const vscodeResources = [
|
||||
'out-build/bootstrap-amd.js',
|
||||
'out-build/paths.js',
|
||||
'out-build/vs/**/*.{svg,png,cur,html}',
|
||||
'out-build/vs/base/node/startupTimers.js',
|
||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh}',
|
||||
'out-build/vs/base/common/performance.js',
|
||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,ps-win.ps1}',
|
||||
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
|
||||
'out-build/vs/workbench/browser/media/*-theme.css',
|
||||
'out-build/vs/workbench/electron-browser/bootstrap/**',
|
||||
@@ -93,8 +108,9 @@ const vscodeResources = [
|
||||
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
|
||||
'out-build/vs/workbench/services/files/**/*.exe',
|
||||
'out-build/vs/workbench/services/files/**/*.md',
|
||||
'out-build/vs/code/electron-browser/sharedProcess.js',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
||||
'out-build/vs/code/electron-browser/issue/issueReporter.js',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'out-build/sql/workbench/electron-browser/splashscreen/*',
|
||||
'out-build/sql/**/*.{svg,png,cur,html}',
|
||||
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
|
||||
@@ -111,6 +127,7 @@ const vscodeResources = [
|
||||
'out-build/sql/parts/grid/views/**/*.html',
|
||||
'out-build/sql/parts/tasks/**/*.html',
|
||||
'out-build/sql/parts/taskHistory/viewlet/media/**',
|
||||
'out-build/sql/parts/jobManagement/common/media/*.svg',
|
||||
'out-build/sql/media/objectTypes/*.svg',
|
||||
'out-build/sql/media/icons/*.svg',
|
||||
'!**/test/**'
|
||||
@@ -122,10 +139,7 @@ const BUNDLED_FILE_HEADER = [
|
||||
' *--------------------------------------------------------*/'
|
||||
].join('\n');
|
||||
|
||||
var languages = ['chs', 'cht', 'jpn', 'kor', 'deu', 'fra', 'esn', 'rus', 'ita'];
|
||||
if (process.env.VSCODE_QUALITY !== 'stable') {
|
||||
languages = languages.concat(['ptb', 'hun', 'trk']); // Add languages requested by the community to non-stable builds
|
||||
}
|
||||
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||
|
||||
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
|
||||
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
|
||||
@@ -135,7 +149,8 @@ gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compil
|
||||
loaderConfig: common.loaderConfig(nodeModules),
|
||||
header: BUNDLED_FILE_HEADER,
|
||||
out: 'out-vscode',
|
||||
languages: languages
|
||||
languages: languages,
|
||||
bundleInfo: undefined
|
||||
}));
|
||||
|
||||
|
||||
@@ -154,7 +169,7 @@ gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], commo
|
||||
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
||||
|
||||
const config = {
|
||||
version: packageJson.electronVersion,
|
||||
version: getElectronVersion(),
|
||||
productAppName: product.nameLong,
|
||||
companyName: 'Microsoft Corporation',
|
||||
copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
|
||||
@@ -176,7 +191,7 @@ const config = {
|
||||
name: product.nameLong,
|
||||
urlSchemes: [product.urlProtocol]
|
||||
}],
|
||||
darwinCredits: darwinCreditsTemplate ? new Buffer(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
|
||||
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
|
||||
linuxExecutableName: product.applicationName,
|
||||
winIcon: 'resources/win32/code.ico',
|
||||
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
|
||||
@@ -240,10 +255,30 @@ function computeChecksum(filename) {
|
||||
return hash;
|
||||
}
|
||||
|
||||
function packageBuiltInExtensions() {
|
||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
||||
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
|
||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||
vsce.createVSIX({
|
||||
cwd: element.path,
|
||||
packagePath: packagePath
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
function packageTask(platform, arch, opts) {
|
||||
opts = opts || {};
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
// {{SQL CARBON EDIT}}
|
||||
const destination = path.join(path.dirname(root), 'sqlops') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
||||
platform = platform || process.platform;
|
||||
|
||||
@@ -269,7 +304,10 @@ function packageTask(platform, arch, opts) {
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name));
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
// {{SQL CARBON EDIT}}
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
|
||||
packageBuiltInExtensions();
|
||||
|
||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||
const nlsFilter = filter('**/*.nls.json', { restore: true });
|
||||
@@ -283,15 +321,20 @@ function packageTask(platform, arch, opts) {
|
||||
.pipe(nlsFilter.restore);
|
||||
}));
|
||||
|
||||
const localExtensionDependencies = gulp.src('extensions/node_modules/**', { base: '.' });
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const extensionDepsSrc = [
|
||||
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
];
|
||||
|
||||
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']))
|
||||
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
|
||||
|
||||
|
||||
const sources = es.merge(src, localExtensions, localExtensionDependencies)
|
||||
.pipe(util.setExecutableBit(['**/*.sh']))
|
||||
.pipe(filter(['**',
|
||||
'!**/*.js.map',
|
||||
'!extensions/**/node_modules/**/{test, tests}/**',
|
||||
'!extensions/**/node_modules/**/test.js']));
|
||||
.pipe(filter(['**', '!**/*.js.map']));
|
||||
|
||||
let version = packageJson.version;
|
||||
const quality = product.quality;
|
||||
@@ -315,11 +358,13 @@ function packageTask(platform, arch, opts) {
|
||||
|
||||
// TODO the API should be copied to `out` during compile, not here
|
||||
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
|
||||
// {{SQL CARBON EDIT}}
|
||||
// {{SQL CARBON EDIT}}
|
||||
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
|
||||
|
||||
const depsSrc = _.flatten(dependencies
|
||||
.map(function (d) { return ['node_modules/' + d + '/**', '!node_modules/' + d + '/**/{test,tests}/**']; }));
|
||||
const depsSrc = [
|
||||
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
..._.flatten(Object.keys(product.dependencies || {}).map(d => [`node_modules/${d}/**`, `!node_modules/${d}/**/{test,tests}/**`]))
|
||||
];
|
||||
|
||||
const deps = gulp.src(depsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']))
|
||||
@@ -327,16 +372,35 @@ function packageTask(platform, arch, opts) {
|
||||
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
|
||||
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
|
||||
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
|
||||
.pipe(util.cleanNodeModule('v8-profiler', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
|
||||
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/**']))
|
||||
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
|
||||
// {{SQL CARBON EDIT}}
|
||||
.pipe(util.cleanNodeModule('chart.js', ['node_modules/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('emmet', ['node_modules/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('pty.js', ['build/**'], ['build/Release/**']))
|
||||
.pipe(util.cleanNodeModule('jquery-ui', ['external/**', 'demos/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('core-js', ['**/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
|
||||
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']));
|
||||
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
|
||||
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
let copiedModules = gulp.src([
|
||||
'node_modules/jquery/**/*.*',
|
||||
'node_modules/reflect-metadata/**/*.*',
|
||||
'node_modules/slickgrid/**/*.*',
|
||||
'node_modules/underscore/**/*.*',
|
||||
'node_modules/zone.js/**/*.*',
|
||||
'node_modules/chart.js/**/*.*'
|
||||
], { base: '.', dot: true });
|
||||
|
||||
let all = es.merge(
|
||||
packageJsonStream,
|
||||
@@ -344,7 +408,8 @@ function packageTask(platform, arch, opts) {
|
||||
license,
|
||||
watermark,
|
||||
api,
|
||||
// {{SQL CARBON EDIT}}
|
||||
// {{SQL CARBON EDIT}}
|
||||
copiedModules,
|
||||
dataApi,
|
||||
sources,
|
||||
deps
|
||||
@@ -415,25 +480,21 @@ gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], p
|
||||
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
|
||||
|
||||
// Transifex Localizations
|
||||
const vscodeLanguages = [
|
||||
'zh-hans',
|
||||
'zh-hant',
|
||||
'ja',
|
||||
'ko',
|
||||
'de',
|
||||
'fr',
|
||||
'es',
|
||||
'ru',
|
||||
'it',
|
||||
'pt-br',
|
||||
'hu',
|
||||
'tr'
|
||||
];
|
||||
const setupDefaultLanguages = [
|
||||
'zh-hans',
|
||||
'zh-hant',
|
||||
'ko'
|
||||
];
|
||||
|
||||
const innoSetupConfig = {
|
||||
'zh-cn': { codePage: 'CP936', defaultInfo: { name: 'Simplified Chinese', id: '$0804', } },
|
||||
'zh-tw': { codePage: 'CP950', defaultInfo: { name: 'Traditional Chinese', id: '$0404' } },
|
||||
'ko': { codePage: 'CP949', defaultInfo: { name: 'Korean', id: '$0412' } },
|
||||
'ja': { codePage: 'CP932' },
|
||||
'de': { codePage: 'CP1252' },
|
||||
'fr': { codePage: 'CP1252' },
|
||||
'es': { codePage: 'CP1252' },
|
||||
'ru': { codePage: 'CP1251' },
|
||||
'it': { codePage: 'CP1252' },
|
||||
'pt-br': { codePage: 'CP1252' },
|
||||
'hu': { codePage: 'CP1250' },
|
||||
'tr': { codePage: 'CP1254' }
|
||||
};
|
||||
|
||||
const apiHostname = process.env.TRANSIFEX_API_URL;
|
||||
const apiName = process.env.TRANSIFEX_API_NAME;
|
||||
@@ -441,27 +502,48 @@ const apiToken = process.env.TRANSIFEX_API_TOKEN;
|
||||
|
||||
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
|
||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||
const pathToExtensions = './extensions/**/*.nls.json';
|
||||
const pathToExtensions = './extensions/*';
|
||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||
|
||||
return es.merge(
|
||||
gulp.src(pathToMetadata).pipe(i18n.prepareXlfFiles()),
|
||||
gulp.src(pathToSetup).pipe(i18n.prepareXlfFiles()),
|
||||
gulp.src(pathToExtensions).pipe(i18n.prepareXlfFiles('vscode-extensions'))
|
||||
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
||||
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
||||
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
||||
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
|
||||
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
|
||||
});
|
||||
|
||||
gulp.task('vscode-translations-pull', function () {
|
||||
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
|
||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||
const pathToExtensions = './extensions/*';
|
||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||
|
||||
return es.merge(
|
||||
i18n.pullXlfFiles('vscode-editor', apiHostname, apiName, apiToken, vscodeLanguages),
|
||||
i18n.pullXlfFiles('vscode-workbench', apiHostname, apiName, apiToken, vscodeLanguages),
|
||||
i18n.pullXlfFiles('vscode-extensions', apiHostname, apiName, apiToken, vscodeLanguages),
|
||||
i18n.pullXlfFiles('vscode-setup', apiHostname, apiName, apiToken, setupDefaultLanguages)
|
||||
).pipe(vfs.dest('../vscode-localization'));
|
||||
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
||||
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
||||
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
||||
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
|
||||
).pipe(vfs.dest('../vscode-transifex-input'));
|
||||
});
|
||||
|
||||
gulp.task('vscode-translations-pull', function () {
|
||||
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
|
||||
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
|
||||
|
||||
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
|
||||
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
|
||||
});
|
||||
});
|
||||
|
||||
gulp.task('vscode-translations-import', function () {
|
||||
return gulp.src('../vscode-localization/**/*.xlf').pipe(i18n.prepareJsonFiles()).pipe(vfs.dest('./i18n'));
|
||||
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
|
||||
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
|
||||
.pipe(i18n.prepareI18nFiles())
|
||||
.pipe(vfs.dest(`./i18n/${language.folderName}`));
|
||||
gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
|
||||
.pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
|
||||
.pipe(vfs.dest(`./build/win32/i18n`));
|
||||
});
|
||||
});
|
||||
|
||||
// Sourcemaps
|
||||
@@ -487,7 +569,8 @@ gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
|
||||
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
|
||||
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
if (!branch.endsWith('/master') && !branch.indexOf('/release/') >= 0) {
|
||||
|
||||
if (!/\/master$/.test(branch) && branch.indexOf('/release/') < 0) {
|
||||
console.log(`Only runs on master and release branches, not ${branch}`);
|
||||
return;
|
||||
}
|
||||
@@ -582,6 +665,7 @@ function versionStringToNumber(versionStr) {
|
||||
return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
|
||||
}
|
||||
|
||||
// This task is only run for the MacOS build
|
||||
gulp.task('generate-vscode-configuration', () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||
@@ -591,7 +675,8 @@ gulp.task('generate-vscode-configuration', () => {
|
||||
|
||||
const userDataDir = path.join(os.tmpdir(), 'tmpuserdata');
|
||||
const extensionsDir = path.join(os.tmpdir(), 'tmpextdir');
|
||||
const appPath = path.join(buildDir, 'VSCode-darwin/Visual\\ Studio\\ Code\\ -\\ Insiders.app/Contents/Resources/app/bin/code');
|
||||
const appName = process.env.VSCODE_QUALITY === 'insider' ? 'Visual\\ Studio\\ Code\\ -\\ Insiders.app' : 'Visual\\ Studio\\ Code.app';
|
||||
const appPath = path.join(buildDir, `VSCode-darwin/${appName}/Contents/Resources/app/bin/code`);
|
||||
const codeProc = cp.exec(`${appPath} --export-default-configuration='${allConfigDetailsPath}' --wait --user-data-dir='${userDataDir}' --extensions-dir='${extensionsDir}'`);
|
||||
|
||||
const timer = setTimeout(() => {
|
||||
@@ -620,27 +705,25 @@ gulp.task('generate-vscode-configuration', () => {
// {{SQL CARBON EDIT}}
// Install service locally before building carbon

function installService(extObj, path) {
var installer = new serviceInstaller.ServiceInstaller(extObj, path);
installer.getServiceInstallDirectoryRoot().then(serviceInstallFolder => {
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
installer.installService();
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
}, getFolderPathError => {
console.log('failed to call getServiceInstallDirectoryRoot error: ' + getFolderPathError);
function installService() {
let config = require('../extensions/mssql/src/config.json');
return platformInfo.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
var installer = new serviceDownloader(config);
let serviceInstallFolder = installer.getInstallDirectory(runtime);
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
return del(serviceInstallFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});

}

gulp.task('install-sqltoolsservice', () => {
var mssqlExt = require('../extensions/mssql/client/out/models/constants');
var extObj = new mssqlExt.Constants();
var path = '../extensions/mssql/client/out/config.json';
return installService(extObj, path);
return installService();
});
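The rewritten installService resolves the tools-service install directory from the mssql extension's config.json before handing the configuration to service-downloader. A small sketch of the path fix-up it performs, with a hypothetical installDirectory value:

// Sketch: resolving the relative installDirectory against the extension source folder,
// mirroring the path.join call in the new installService above.
const path = require('path');
const config = { installDirectory: './sqltoolsservice' }; // hypothetical value from extensions/mssql/src/config.json
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
console.log(config.installDirectory); // absolute folder the service is downloaded into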
@@ -12,9 +12,12 @@ const shell = require('gulp-shell');
const es = require('event-stream');
const vfs = require('vinyl-fs');
const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const packageJson = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
const rpmDependencies = require('../resources/linux/rpm/dependencies');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');

const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);

@@ -111,8 +114,7 @@ function buildDebPackage(arch) {
return shell.task([
'chmod 755 ' + product.applicationName + '-' + debArch + '/DEBIAN/postinst ' + product.applicationName + '-' + debArch + '/DEBIAN/prerm ' + product.applicationName + '-' + debArch + '/DEBIAN/postrm',
'mkdir -p deb',
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb',
'dpkg-scanpackages deb /dev/null > Packages'
'fakeroot dpkg-deb -b ' + product.applicationName + '-' + debArch + ' deb'
], { cwd: '.build/linux/deb/' + debArch });
}

@@ -220,10 +222,10 @@ function prepareSnapPackage(arch) {

function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);

const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
return shell.task([
`chmod +x ${snapBuildPath}/electron-launch`,
`cd ${snapBuildPath} && snapcraft snap`
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
]);
}
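With the --output flag the snap is now written one directory above the snap build path under a fully qualified name. A quick sketch of how that filename is assembled, using hypothetical values for the product name and package version:

// Sketch: the snap artifact name built by buildSnapPackage above (values are hypothetical).
const applicationName = 'sqlops';                  // product.applicationName (assumed)
const version = '0.26.0';                          // packageJson.version (hypothetical)
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
const arch = 'x64';
const snapFilename = `${applicationName}-${version}-${linuxPackageRevision}-${arch}.snap`;
console.log(snapFilename); // e.g. sqlops-0.26.0-1520000000-x64.snap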
@@ -11,8 +11,11 @@ const assert = require('assert');
const cp = require('child_process');
const _7z = require('7zip')['7z'];
const util = require('./lib/util');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const pkg = require('../package.json');
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
const product = require('../product.json');
const vfs = require('vinyl-fs');

const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
@@ -91,3 +94,13 @@ gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], arch

gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));

function copyInnoUpdater(arch) {
return () => {
return gulp.src('build/win32/{inno_updater.exe,vcruntime140.dll}', { base: 'build/win32' })
.pipe(vfs.dest(path.join(buildPath(arch), 'tools')));
};
}

gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
build/lib/asar.js (new file, 118 lines)
@@ -0,0 +1,118 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var path = require("path");
|
||||
var es = require("event-stream");
|
||||
var pickle = require("chromium-pickle-js");
|
||||
var Filesystem = require("asar/lib/filesystem");
|
||||
var VinylFile = require("vinyl");
|
||||
var minimatch = require("minimatch");
|
||||
function createAsar(folderPath, unpackGlobs, destFilename) {
|
||||
var shouldUnpackFile = function (file) {
|
||||
for (var i = 0; i < unpackGlobs.length; i++) {
|
||||
if (minimatch(file.relative, unpackGlobs[i])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
var filesystem = new Filesystem(folderPath);
|
||||
var out = [];
|
||||
// Keep track of pending inserts
|
||||
var pendingInserts = 0;
|
||||
var onFileInserted = function () { pendingInserts--; };
|
||||
// Do not insert twice the same directory
|
||||
var seenDir = {};
|
||||
var insertDirectoryRecursive = function (dir) {
|
||||
if (seenDir[dir]) {
|
||||
return;
|
||||
}
|
||||
var lastSlash = dir.lastIndexOf('/');
|
||||
if (lastSlash === -1) {
|
||||
lastSlash = dir.lastIndexOf('\\');
|
||||
}
|
||||
if (lastSlash !== -1) {
|
||||
insertDirectoryRecursive(dir.substring(0, lastSlash));
|
||||
}
|
||||
seenDir[dir] = true;
|
||||
filesystem.insertDirectory(dir);
|
||||
};
|
||||
var insertDirectoryForFile = function (file) {
|
||||
var lastSlash = file.lastIndexOf('/');
|
||||
if (lastSlash === -1) {
|
||||
lastSlash = file.lastIndexOf('\\');
|
||||
}
|
||||
if (lastSlash !== -1) {
|
||||
insertDirectoryRecursive(file.substring(0, lastSlash));
|
||||
}
|
||||
};
|
||||
var insertFile = function (relativePath, stat, shouldUnpack) {
|
||||
insertDirectoryForFile(relativePath);
|
||||
pendingInserts++;
|
||||
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
|
||||
};
|
||||
return es.through(function (file) {
|
||||
if (file.stat.isDirectory()) {
|
||||
return;
|
||||
}
|
||||
if (!file.stat.isFile()) {
|
||||
throw new Error("unknown item in stream!");
|
||||
}
|
||||
var shouldUnpack = shouldUnpackFile(file);
|
||||
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
|
||||
if (shouldUnpack) {
|
||||
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
|
||||
var relative = path.relative(folderPath, file.path);
|
||||
this.queue(new VinylFile({
|
||||
cwd: folderPath,
|
||||
base: folderPath,
|
||||
path: path.join(destFilename + '.unpacked', relative),
|
||||
stat: file.stat,
|
||||
contents: file.contents
|
||||
}));
|
||||
}
|
||||
else {
|
||||
// The file goes inside of xx.asar
|
||||
out.push(file.contents);
|
||||
}
|
||||
}, function () {
|
||||
var _this = this;
|
||||
var finish = function () {
|
||||
{
|
||||
var headerPickle = pickle.createEmpty();
|
||||
headerPickle.writeString(JSON.stringify(filesystem.header));
|
||||
var headerBuf = headerPickle.toBuffer();
|
||||
var sizePickle = pickle.createEmpty();
|
||||
sizePickle.writeUInt32(headerBuf.length);
|
||||
var sizeBuf = sizePickle.toBuffer();
|
||||
out.unshift(headerBuf);
|
||||
out.unshift(sizeBuf);
|
||||
}
|
||||
var contents = Buffer.concat(out);
|
||||
out.length = 0;
|
||||
_this.queue(new VinylFile({
|
||||
cwd: folderPath,
|
||||
base: folderPath,
|
||||
path: destFilename,
|
||||
contents: contents
|
||||
}));
|
||||
_this.queue(null);
|
||||
};
|
||||
// Call finish() only when all file inserts have finished...
|
||||
if (pendingInserts === 0) {
|
||||
finish();
|
||||
}
|
||||
else {
|
||||
onFileInserted = function () {
|
||||
pendingInserts--;
|
||||
if (pendingInserts === 0) {
|
||||
finish();
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
exports.createAsar = createAsar;
|
||||
build/lib/asar.ts (new file, 131 lines)
@@ -0,0 +1,131 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as path from 'path';
|
||||
import * as es from 'event-stream';
|
||||
import * as pickle from 'chromium-pickle-js';
|
||||
import * as Filesystem from 'asar/lib/filesystem';
|
||||
import * as VinylFile from 'vinyl';
|
||||
import * as minimatch from 'minimatch';
|
||||
|
||||
export function createAsar(folderPath: string, unpackGlobs: string[], destFilename: string): NodeJS.ReadWriteStream {
|
||||
|
||||
const shouldUnpackFile = (file: VinylFile): boolean => {
|
||||
for (let i = 0; i < unpackGlobs.length; i++) {
|
||||
if (minimatch(file.relative, unpackGlobs[i])) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
return false;
|
||||
};
|
||||
|
||||
const filesystem = new Filesystem(folderPath);
|
||||
const out: Buffer[] = [];
|
||||
|
||||
// Keep track of pending inserts
|
||||
let pendingInserts = 0;
|
||||
let onFileInserted = () => { pendingInserts--; };
|
||||
|
||||
// Do not insert twice the same directory
|
||||
const seenDir: { [key: string]: boolean; } = {};
|
||||
const insertDirectoryRecursive = (dir: string) => {
|
||||
if (seenDir[dir]) {
|
||||
return;
|
||||
}
|
||||
|
||||
let lastSlash = dir.lastIndexOf('/');
|
||||
if (lastSlash === -1) {
|
||||
lastSlash = dir.lastIndexOf('\\');
|
||||
}
|
||||
if (lastSlash !== -1) {
|
||||
insertDirectoryRecursive(dir.substring(0, lastSlash));
|
||||
}
|
||||
seenDir[dir] = true;
|
||||
filesystem.insertDirectory(dir);
|
||||
};
|
||||
|
||||
const insertDirectoryForFile = (file: string) => {
|
||||
let lastSlash = file.lastIndexOf('/');
|
||||
if (lastSlash === -1) {
|
||||
lastSlash = file.lastIndexOf('\\');
|
||||
}
|
||||
if (lastSlash !== -1) {
|
||||
insertDirectoryRecursive(file.substring(0, lastSlash));
|
||||
}
|
||||
};
|
||||
|
||||
const insertFile = (relativePath: string, stat: { size: number; mode: number; }, shouldUnpack: boolean) => {
|
||||
insertDirectoryForFile(relativePath);
|
||||
pendingInserts++;
|
||||
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
|
||||
};
|
||||
|
||||
return es.through(function (file) {
|
||||
if (file.stat.isDirectory()) {
|
||||
return;
|
||||
}
|
||||
if (!file.stat.isFile()) {
|
||||
throw new Error(`unknown item in stream!`);
|
||||
}
|
||||
const shouldUnpack = shouldUnpackFile(file);
|
||||
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
|
||||
|
||||
if (shouldUnpack) {
|
||||
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
|
||||
const relative = path.relative(folderPath, file.path);
|
||||
this.queue(new VinylFile({
|
||||
cwd: folderPath,
|
||||
base: folderPath,
|
||||
path: path.join(destFilename + '.unpacked', relative),
|
||||
stat: file.stat,
|
||||
contents: file.contents
|
||||
}));
|
||||
} else {
|
||||
// The file goes inside of xx.asar
|
||||
out.push(file.contents);
|
||||
}
|
||||
}, function () {
|
||||
|
||||
let finish = () => {
|
||||
{
|
||||
const headerPickle = pickle.createEmpty();
|
||||
headerPickle.writeString(JSON.stringify(filesystem.header));
|
||||
const headerBuf = headerPickle.toBuffer();
|
||||
|
||||
const sizePickle = pickle.createEmpty();
|
||||
sizePickle.writeUInt32(headerBuf.length);
|
||||
const sizeBuf = sizePickle.toBuffer();
|
||||
|
||||
out.unshift(headerBuf);
|
||||
out.unshift(sizeBuf);
|
||||
}
|
||||
|
||||
const contents = Buffer.concat(out);
|
||||
out.length = 0;
|
||||
|
||||
this.queue(new VinylFile({
|
||||
cwd: folderPath,
|
||||
base: folderPath,
|
||||
path: destFilename,
|
||||
contents: contents
|
||||
}));
|
||||
this.queue(null);
|
||||
};
|
||||
|
||||
// Call finish() only when all file inserts have finished...
|
||||
if (pendingInserts === 0) {
|
||||
finish();
|
||||
} else {
|
||||
onFileInserted = () => {
|
||||
pendingInserts--;
|
||||
if (pendingInserts === 0) {
|
||||
finish();
|
||||
}
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
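For reference, this is roughly how the new createAsar helper is consumed by the packaging pipeline in gulpfile.vscode.js above; the destination folder and the abbreviated unpack-glob list here are illustrative, not the exact build configuration.

// Sketch: piping node_modules through createAsar, keeping native modules unpacked
// next to the archive in node_modules.asar.unpacked.
const gulp = require('gulp');
const path = require('path');
const { createAsar } = require('./lib/asar');

gulp.src('node_modules/**', { base: '.', dot: true })
	.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node'], 'app/node_modules.asar'))
	.pipe(gulp.dest('out-app')); // hypothetical output folder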
build/lib/builtInExtensions.js (new file, 122 lines)
@@ -0,0 +1,122 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const os = require('os');
|
||||
const mkdirp = require('mkdirp');
|
||||
const rimraf = require('rimraf');
|
||||
const es = require('event-stream');
|
||||
const rename = require('gulp-rename');
|
||||
const vfs = require('vinyl-fs');
|
||||
const ext = require('./extensions');
|
||||
const util = require('gulp-util');
|
||||
|
||||
const root = path.dirname(path.dirname(__dirname));
|
||||
// @ts-ignore Microsoft/TypeScript#21262 complains about a require of a JSON file
|
||||
const builtInExtensions = require('../builtInExtensions.json');
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
|
||||
function getExtensionPath(extension) {
|
||||
return path.join(root, '.build', 'builtInExtensions', extension.name);
|
||||
}
|
||||
|
||||
function isUpToDate(extension) {
|
||||
const packagePath = path.join(getExtensionPath(extension), 'package.json');
|
||||
|
||||
if (!fs.existsSync(packagePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const packageContents = fs.readFileSync(packagePath, { encoding: 'utf8' });
|
||||
|
||||
try {
|
||||
const diskVersion = JSON.parse(packageContents).version;
|
||||
return (diskVersion === extension.version);
|
||||
} catch (err) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function syncMarketplaceExtension(extension) {
|
||||
if (isUpToDate(extension)) {
|
||||
util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
|
||||
rimraf.sync(getExtensionPath(extension));
|
||||
|
||||
return ext.fromMarketplace(extension.name, extension.version)
|
||||
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
||||
.pipe(vfs.dest('.build/builtInExtensions'))
|
||||
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
|
||||
}
|
||||
|
||||
function syncExtension(extension, controlState) {
|
||||
switch (controlState) {
|
||||
case 'disabled':
|
||||
util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
|
||||
return es.readArray([]);
|
||||
|
||||
case 'marketplace':
|
||||
return syncMarketplaceExtension(extension);
|
||||
|
||||
default:
|
||||
if (!fs.existsSync(controlState)) {
|
||||
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
||||
return es.readArray([]);
|
||||
|
||||
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
||||
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
||||
return es.readArray([]);
|
||||
}
|
||||
|
||||
util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
|
||||
return es.readArray([]);
|
||||
}
|
||||
}
|
||||
|
||||
function readControlFile() {
|
||||
try {
|
||||
return JSON.parse(fs.readFileSync(controlFilePath, 'utf8'));
|
||||
} catch (err) {
|
||||
return {};
|
||||
}
|
||||
}
|
||||
|
||||
function writeControlFile(control) {
|
||||
mkdirp.sync(path.dirname(controlFilePath));
|
||||
fs.writeFileSync(controlFilePath, JSON.stringify(control, null, 2));
|
||||
}
|
||||
|
||||
function main() {
|
||||
util.log('Syncronizing built-in extensions...');
|
||||
util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);
|
||||
|
||||
const control = readControlFile();
|
||||
const streams = [];
|
||||
|
||||
for (const extension of builtInExtensions) {
|
||||
let controlState = control[extension.name] || 'marketplace';
|
||||
control[extension.name] = controlState;
|
||||
|
||||
streams.push(syncExtension(extension, controlState));
|
||||
}
|
||||
|
||||
writeControlFile(control);
|
||||
|
||||
es.merge(streams)
|
||||
.on('error', err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
})
|
||||
.on('end', () => {
|
||||
process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
main();
|
||||
@@ -217,6 +217,7 @@ function removeDuplicateTSBoilerplate(destFiles) {
|
||||
{ start: /^var __metadata/, end: /^};$/ },
|
||||
{ start: /^var __param/, end: /^};$/ },
|
||||
{ start: /^var __awaiter/, end: /^};$/ },
|
||||
{ start: /^var __generator/, end: /^};$/ },
|
||||
];
|
||||
destFiles.forEach(function (destFile) {
|
||||
var SEEN_BOILERPLATE = [];
|
||||
|
||||
@@ -44,11 +44,11 @@ interface ILoaderPluginReqFunc {
|
||||
|
||||
export interface IEntryPoint {
|
||||
name: string;
|
||||
include: string[];
|
||||
exclude: string[];
|
||||
include?: string[];
|
||||
exclude?: string[];
|
||||
prepend: string[];
|
||||
append: string[];
|
||||
dest: string;
|
||||
append?: string[];
|
||||
dest?: string;
|
||||
}
|
||||
|
||||
interface IEntryPointMap {
|
||||
@@ -339,6 +339,7 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
{ start: /^var __metadata/, end: /^};$/ },
|
||||
{ start: /^var __param/, end: /^};$/ },
|
||||
{ start: /^var __awaiter/, end: /^};$/ },
|
||||
{ start: /^var __generator/, end: /^};$/ },
|
||||
];
|
||||
|
||||
destFiles.forEach((destFile) => {
|
||||
|
||||
@@ -22,6 +22,9 @@ var rootDir = path.join(__dirname, '../../src');
|
||||
var options = require('../../src/tsconfig.json').compilerOptions;
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) {
|
||||
options.sourceMap = false;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
function createCompile(build, emitError) {
|
||||
@@ -58,9 +61,13 @@ function compileTask(out, build) {
|
||||
return function () {
|
||||
var compile = createCompile(build, true);
|
||||
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
|
||||
return src
|
||||
.pipe(compile())
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, false));
|
||||
};
|
||||
}
|
||||
@@ -70,54 +77,19 @@ function watchTask(out, build) {
|
||||
var compile = createCompile(build);
|
||||
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||
var watchSrc = watch('src/**', { base: 'src' });
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
|
||||
return watchSrc
|
||||
.pipe(util.incremental(compile, src, true))
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, true));
|
||||
};
|
||||
}
|
||||
exports.watchTask = watchTask;
|
||||
function reloadTypeScriptNodeModule() {
|
||||
var util = require('gulp-util');
|
||||
function log(message) {
|
||||
var rest = [];
|
||||
for (var _i = 1; _i < arguments.length; _i++) {
|
||||
rest[_i - 1] = arguments[_i];
|
||||
}
|
||||
util.log.apply(util, [util.colors.cyan('[memory watch dog]'), message].concat(rest));
|
||||
}
|
||||
function heapUsed() {
|
||||
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
|
||||
}
|
||||
return es.through(function (data) {
|
||||
this.emit('data', data);
|
||||
}, function () {
|
||||
log('memory usage after compilation finished: ' + heapUsed());
|
||||
// It appears we are running into some variant of
|
||||
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
|
||||
//
|
||||
// Even though all references are dropped, some
|
||||
// optimized methods in the TS compiler end up holding references
|
||||
// to the entire TypeScript language host (>600MB)
|
||||
//
|
||||
// The idea is to force v8 to drop references to these
|
||||
// optimized methods, by "reloading" the typescript node module
|
||||
log('Reloading typescript node module...');
|
||||
var resolvedName = require.resolve('typescript');
|
||||
var originalModule = require.cache[resolvedName];
|
||||
delete require.cache[resolvedName];
|
||||
var newExports = require('typescript');
|
||||
require.cache[resolvedName] = originalModule;
|
||||
for (var prop in newExports) {
|
||||
if (newExports.hasOwnProperty(prop)) {
|
||||
originalModule.exports[prop] = newExports[prop];
|
||||
}
|
||||
}
|
||||
log('typescript node module reloaded.');
|
||||
this.emit('end');
|
||||
});
|
||||
}
|
||||
function monacodtsTask(out, isWatch) {
|
||||
var basePath = path.resolve(process.cwd(), out);
|
||||
var neededFiles = {};
|
||||
monacodts.getFilesToWatch(out).forEach(function (filePath) {
|
||||
filePath = path.normalize(filePath);
|
||||
@@ -160,7 +132,7 @@ function monacodtsTask(out, isWatch) {
|
||||
}));
|
||||
}
|
||||
resultStream = es.through(function (data) {
|
||||
var filePath = path.normalize(data.path);
|
||||
var filePath = path.normalize(path.resolve(basePath, data.relative));
|
||||
if (neededFiles[filePath]) {
|
||||
setInputFile(filePath, data.contents.toString());
|
||||
}
|
||||
|
||||
@@ -25,6 +25,9 @@ const rootDir = path.join(__dirname, '../../src');
|
||||
const options = require('../../src/tsconfig.json').compilerOptions;
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
|
||||
@@ -49,7 +52,6 @@ function createCompile(build: boolean, emitError?: boolean): (token?: util.ICanc
|
||||
.pipe(tsFilter)
|
||||
.pipe(util.loadSourcemaps())
|
||||
.pipe(ts(token))
|
||||
// .pipe(build ? reloadTypeScriptNodeModule() : es.through())
|
||||
.pipe(noDeclarationsFilter)
|
||||
.pipe(build ? nls() : es.through())
|
||||
.pipe(noDeclarationsFilter.restore)
|
||||
@@ -75,9 +77,14 @@ export function compileTask(out: string, build: boolean): () => NodeJS.ReadWrite
|
||||
gulp.src('node_modules/typescript/lib/lib.d.ts'),
|
||||
);
|
||||
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
|
||||
|
||||
return src
|
||||
.pipe(compile())
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, false));
|
||||
};
|
||||
}
|
||||
@@ -93,62 +100,22 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
|
||||
);
|
||||
const watchSrc = watch('src/**', { base: 'src' });
|
||||
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
|
||||
|
||||
return watchSrc
|
||||
.pipe(util.incremental(compile, src, true))
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, true));
|
||||
};
|
||||
}
|
||||
|
||||
function reloadTypeScriptNodeModule(): NodeJS.ReadWriteStream {
|
||||
var util = require('gulp-util');
|
||||
function log(message: any, ...rest: any[]): void {
|
||||
util.log(util.colors.cyan('[memory watch dog]'), message, ...rest);
|
||||
}
|
||||
|
||||
function heapUsed(): string {
|
||||
return (process.memoryUsage().heapUsed / 1024 / 1024).toFixed(2) + ' MB';
|
||||
}
|
||||
|
||||
return es.through(function (data) {
|
||||
this.emit('data', data);
|
||||
}, function () {
|
||||
|
||||
log('memory usage after compilation finished: ' + heapUsed());
|
||||
|
||||
// It appears we are running into some variant of
|
||||
// https://bugs.chromium.org/p/v8/issues/detail?id=2073
|
||||
//
|
||||
// Even though all references are dropped, some
|
||||
// optimized methods in the TS compiler end up holding references
|
||||
// to the entire TypeScript language host (>600MB)
|
||||
//
|
||||
// The idea is to force v8 to drop references to these
|
||||
// optimized methods, by "reloading" the typescript node module
|
||||
|
||||
log('Reloading typescript node module...');
|
||||
|
||||
var resolvedName = require.resolve('typescript');
|
||||
|
||||
var originalModule = require.cache[resolvedName];
|
||||
delete require.cache[resolvedName];
|
||||
var newExports = require('typescript');
|
||||
require.cache[resolvedName] = originalModule;
|
||||
|
||||
for (var prop in newExports) {
|
||||
if (newExports.hasOwnProperty(prop)) {
|
||||
originalModule.exports[prop] = newExports[prop];
|
||||
}
|
||||
}
|
||||
|
||||
log('typescript node module reloaded.');
|
||||
|
||||
this.emit('end');
|
||||
});
|
||||
}
|
||||
|
||||
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
|
||||
|
||||
const basePath = path.resolve(process.cwd(), out);
|
||||
|
||||
const neededFiles: { [file: string]: boolean; } = {};
|
||||
monacodts.getFilesToWatch(out).forEach(function (filePath) {
|
||||
filePath = path.normalize(filePath);
|
||||
@@ -196,7 +163,7 @@ function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
|
||||
}
|
||||
|
||||
resultStream = es.through(function (data) {
|
||||
const filePath = path.normalize(data.path);
|
||||
const filePath = path.normalize(path.resolve(basePath, data.relative));
|
||||
if (neededFiles[filePath]) {
|
||||
setInputFile(filePath, data.contents.toString());
|
||||
}
|
||||
|
||||
build/lib/electron.js (new file, 28 lines)
@@ -0,0 +1,28 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

const fs = require('fs');
const path = require('path');
const root = path.dirname(path.dirname(__dirname));

function getElectronVersion() {
	const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
	const target = /^target "(.*)"$/m.exec(yarnrc)[1];

	return target;
}

module.exports.getElectronVersion = getElectronVersion;

// returns 0 if the right version of electron is in .build/electron
if (require.main === module) {
	const version = getElectronVersion();
	const versionFile = path.join(root, '.build', 'electron', 'version');
	const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `v${version}`;

	process.exit(isUpToDate ? 0 : 1);
}
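getElectronVersion simply reads the target line out of the repository's .yarnrc. As an illustration (the version number here is hypothetical), a .yarnrc containing a line such as:

target "1.7.12"

would make getElectronVersion() return '1.7.12', and the CLI mode exits 0 only when .build/electron/version already holds v1.7.12.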
@@ -20,7 +20,7 @@ var vsce = require("vsce");
var File = require("vinyl");
function fromLocal(extensionPath) {
var result = es.through();
vsce.listFiles({ cwd: extensionPath })
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(function (fileNames) {
var files = fileNames
.map(function (fileName) { return path.join(extensionPath, fileName); })

@@ -22,7 +22,7 @@ import * as File from 'vinyl';
export function fromLocal(extensionPath: string): Stream {
const result = es.through();

vsce.listFiles({ cwd: extensionPath })
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
File diff suppressed because it is too large
@@ -46,10 +46,6 @@
|
||||
"name": "vs/workbench/parts/execution",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/explorers",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/extensions",
|
||||
"project": "vscode-workbench"
|
||||
@@ -71,7 +67,11 @@
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/nps",
|
||||
"name": "vs/workbench/parts/localizations",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/parts/logs",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
@@ -138,6 +138,10 @@
|
||||
"name": "vs/workbench/parts/welcome",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/actions",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/configuration",
|
||||
"project": "vscode-workbench"
|
||||
@@ -146,6 +150,10 @@
|
||||
"name": "vs/workbench/services/crashReporter",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/dialogs",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/editor",
|
||||
"project": "vscode-workbench"
|
||||
@@ -154,6 +162,10 @@
|
||||
"name": "vs/workbench/services/extensions",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/jsonschemas",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/files",
|
||||
"project": "vscode-workbench"
|
||||
@@ -162,10 +174,6 @@
|
||||
"name": "vs/workbench/services/keybinding",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/message",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "vs/workbench/services/mode",
|
||||
"project": "vscode-workbench"
|
||||
@@ -193,10 +201,6 @@
|
||||
{
|
||||
"name": "vs/workbench/services/decorations",
|
||||
"project": "vscode-workbench"
|
||||
},
|
||||
{
|
||||
"name": "setup_messages",
|
||||
"project": "vscode-workbench"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
build/lib/i18n.ts (1040 lines): file diff suppressed because it is too large.
@@ -79,7 +79,7 @@ function isImportNode(node) {
function fileFrom(file, contents, path) {
if (path === void 0) { path = file.path; }
return new File({
contents: new Buffer(contents),
contents: Buffer.from(contents),
base: file.base,
cwd: file.cwd,
path: path

@@ -131,7 +131,7 @@ module nls {

export function fileFrom(file: File, contents: string, path: string = file.path) {
return new File({
contents: new Buffer(contents),
contents: Buffer.from(contents),
base: file.base,
cwd: file.cwd,
path: path
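These hunks, like the optimize.js/optimize.ts changes that follow, all apply the same substitution: the deprecated Buffer constructor is replaced with the Buffer.from factory. A minimal illustration of the pattern:

// new Buffer(...) is deprecated because its behaviour depends on the argument type;
// the factory methods are explicit about what gets allocated.
const fromString = Buffer.from('contents');   // replaces new Buffer('contents')
const zeroFilled = Buffer.alloc(16);          // replaces new Buffer(16)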
@@ -59,7 +59,7 @@ function loader(bundledFileHeader, bundleLoader) {
|
||||
this.emit('data', new VinylFile({
|
||||
path: 'fake',
|
||||
base: '',
|
||||
contents: new Buffer(bundledFileHeader)
|
||||
contents: Buffer.from(bundledFileHeader)
|
||||
}));
|
||||
this.emit('data', data);
|
||||
}
|
||||
@@ -98,7 +98,7 @@ function toConcatStream(bundledFileHeader, sources, dest) {
|
||||
return new VinylFile({
|
||||
path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
|
||||
base: base,
|
||||
contents: new Buffer(source.contents)
|
||||
contents: Buffer.from(source.contents)
|
||||
});
|
||||
});
|
||||
return es.readArray(treatedSources)
|
||||
@@ -141,7 +141,7 @@ function optimizeTask(opts) {
|
||||
bundleInfoArray.push(new VinylFile({
|
||||
path: 'bundleInfo.json',
|
||||
base: '.',
|
||||
contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
|
||||
contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
|
||||
}));
|
||||
}
|
||||
es.readArray(bundleInfoArray).pipe(bundleInfoStream);
|
||||
@@ -174,7 +174,6 @@ function optimizeTask(opts) {
|
||||
};
|
||||
}
|
||||
exports.optimizeTask = optimizeTask;
|
||||
;
|
||||
/**
|
||||
* Wrap around uglify and allow the preserveComments function
|
||||
* to have a file "context" to include our copyright only once per file.
|
||||
@@ -212,8 +211,7 @@ function uglifyWithCopyrights() {
|
||||
return stream.pipe(minify({
|
||||
output: {
|
||||
comments: preserveComments(f),
|
||||
// linux tfs build agent is crashing, does this help?§
|
||||
max_line_len: 3200000
|
||||
max_line_len: 1024
|
||||
}
|
||||
}));
|
||||
}));
|
||||
@@ -238,4 +236,3 @@ function minifyTask(src, sourceMapBaseUrl) {
|
||||
};
|
||||
}
|
||||
exports.minifyTask = minifyTask;
|
||||
;
|
||||
|
||||
@@ -31,7 +31,7 @@ function log(prefix: string, message: string): void {
}

// {{SQL CARBON EDIT}}
-export function loaderConfig(emptyPaths: string[]) {
+export function loaderConfig(emptyPaths?: string[]) {
	const result = {
		paths: {
			'vs': 'out-build/vs',
@@ -73,7 +73,7 @@ function loader(bundledFileHeader: string, bundleLoader: boolean): NodeJS.ReadWriteStream {
			this.emit('data', new VinylFile({
				path: 'fake',
				base: '',
-				contents: new Buffer(bundledFileHeader)
+				contents: Buffer.from(bundledFileHeader)
			}));
			this.emit('data', data);
		} else {
@@ -117,7 +117,7 @@ function toConcatStream(bundledFileHeader: string, sources: bundle.IFile[], dest: string): NodeJS.ReadWriteStream {
		return new VinylFile({
			path: source.path ? root + '/' + source.path.replace(/\\/g, '/') : 'fake',
			base: base,
-			contents: new Buffer(source.contents)
+			contents: Buffer.from(source.contents)
		});
	});

@@ -165,7 +165,7 @@ export interface IOptimizeTaskOpts {
	/**
	 * (languages to process)
	 */
-	languages: string[];
+	languages: i18n.Language[];
}
export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
	const entryPoints = opts.entryPoints;
@@ -201,7 +201,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
		bundleInfoArray.push(new VinylFile({
			path: 'bundleInfo.json',
			base: '.',
-			contents: new Buffer(JSON.stringify(result.bundleData, null, '\t'))
+			contents: Buffer.from(JSON.stringify(result.bundleData, null, '\t'))
		}));
	}
	es.readArray(bundleInfoArray).pipe(bundleInfoStream);
@@ -241,7 +241,7 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStream {
		}))
		.pipe(gulp.dest(out));
	};
};
}

declare class FileWithCopyright extends VinylFile {
	public __hasOurCopyright: boolean;
@@ -287,8 +287,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
		return stream.pipe(minify({
			output: {
				comments: preserveComments(<FileWithCopyright>f),
-				// linux tfs build agent is crashing, does this help?§
-				max_line_len: 3200000
+				max_line_len: 1024
			}
		}));
	}));
@@ -296,7 +295,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
	return es.duplex(input, output);
}

-export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
+export function minifyTask(src: string, sourceMapBaseUrl?: string): (cb: any) => void {
	const sourceMappingURL = sourceMapBaseUrl && (f => `${sourceMapBaseUrl}/${f.relative}.map`);

	return cb => {
@@ -327,4 +326,4 @@ export function minifyTask(src: string, sourceMapBaseUrl: string): (cb: any) => void {
			cb(err);
		});
	};
};
}
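Most of the mechanical changes above swap the deprecated new Buffer(...) constructor for Buffer.from(...). A short illustration of the pattern on Node.js, using the same vinyl File shape as the build code; the header text is made up.

import * as VinylFile from 'vinyl';

// new Buffer('...') is deprecated because its overloads are easy to misuse;
// Buffer.from is explicit about building a buffer from string contents.
const header = Buffer.from('/* a made-up copyright header */\n', 'utf8');

const file = new VinylFile({
	path: 'fake',   // placeholder path, as used by the loader stream above
	base: '',
	contents: header
});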
@@ -88,10 +88,11 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
|
||||
var info = this.findDescribingParent(node);
|
||||
// Ignore strings in import and export nodes.
|
||||
if (info && info.isImport && doubleQuoted) {
|
||||
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
|
||||
this.createReplacement(node.getStart(), 1, '\''),
|
||||
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
|
||||
]));
|
||||
var fix = [
|
||||
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
|
||||
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
|
||||
];
|
||||
this.addFailureAtNode(node, NoUnexternalizedStringsRuleWalker.ImportFailureMessage, fix);
|
||||
return;
|
||||
}
|
||||
var callInfo = info ? info.callInfo : null;
|
||||
@@ -101,8 +102,9 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
|
||||
}
|
||||
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
|
||||
var s = node.getText();
|
||||
var replacement = new Lint.Replacement(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")");
|
||||
var fix = new Lint.Fix('Unexternalitzed string', [replacement]);
|
||||
var fix = [
|
||||
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), "nls.localize('KEY-" + s.substring(1, s.length - 1) + "', " + s + ")"),
|
||||
];
|
||||
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), "Unexternalized string found: " + node.getText(), fix));
|
||||
return;
|
||||
}
|
||||
@@ -134,16 +136,24 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
|
||||
}
|
||||
}
|
||||
}
|
||||
var messageArg = callInfo.argIndex === this.messageIndex
|
||||
? callInfo.callExpression.arguments[this.messageIndex]
|
||||
: null;
|
||||
if (messageArg && messageArg !== node) {
|
||||
var messageArg = callInfo.callExpression.arguments[this.messageIndex];
|
||||
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
|
||||
this.addFailure(this.createFailure(messageArg.getStart(), messageArg.getWidth(), "Message argument to '" + callInfo.callExpression.expression.getText() + "' must be a string literal."));
|
||||
return;
|
||||
}
|
||||
};
|
||||
NoUnexternalizedStringsRuleWalker.prototype.recordKey = function (keyNode, messageNode) {
|
||||
var text = keyNode.getText();
|
||||
// We have an empty key
|
||||
if (text.match(/(['"]) *\1/)) {
|
||||
if (messageNode) {
|
||||
this.addFailureAtNode(keyNode, "Key is empty for message: " + messageNode.getText());
|
||||
}
|
||||
else {
|
||||
this.addFailureAtNode(keyNode, "Key is empty.");
|
||||
}
|
||||
return;
|
||||
}
|
||||
var occurrences = this.usedKeys[text];
|
||||
if (!occurrences) {
|
||||
occurrences = [];
|
||||
@@ -176,7 +186,7 @@ var NoUnexternalizedStringsRuleWalker = /** @class */ (function (_super) {
|
||||
node = parent;
|
||||
}
|
||||
};
|
||||
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double qoutes for imports.';
|
||||
NoUnexternalizedStringsRuleWalker.ImportFailureMessage = 'Do not use double quotes for imports.';
|
||||
NoUnexternalizedStringsRuleWalker.DOUBLE_QUOTE = '"';
|
||||
return NoUnexternalizedStringsRuleWalker;
|
||||
}(Lint.RuleWalker));
|
||||
|
||||
@@ -45,7 +45,7 @@ interface KeyMessagePair {
|
||||
|
||||
class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
|
||||
|
||||
private static ImportFailureMessage = 'Do not use double qoutes for imports.';
|
||||
private static ImportFailureMessage = 'Do not use double quotes for imports.';
|
||||
|
||||
private static DOUBLE_QUOTE: string = '"';
|
||||
|
||||
@@ -104,13 +104,14 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
|
||||
let info = this.findDescribingParent(node);
|
||||
// Ignore strings in import and export nodes.
|
||||
if (info && info.isImport && doubleQuoted) {
|
||||
const fix = [
|
||||
Lint.Replacement.replaceFromTo(node.getStart(), 1, '\''),
|
||||
Lint.Replacement.replaceFromTo(node.getStart() + text.length - 1, 1, '\''),
|
||||
];
|
||||
this.addFailureAtNode(
|
||||
node,
|
||||
NoUnexternalizedStringsRuleWalker.ImportFailureMessage,
|
||||
new Lint.Fix(NoUnexternalizedStringsRuleWalker.ImportFailureMessage, [
|
||||
this.createReplacement(node.getStart(), 1, '\''),
|
||||
this.createReplacement(node.getStart() + text.length - 1, 1, '\''),
|
||||
])
|
||||
fix
|
||||
);
|
||||
return;
|
||||
}
|
||||
@@ -122,8 +123,9 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
|
||||
|
||||
if (doubleQuoted && (!callInfo || callInfo.argIndex === -1 || !this.signatures[functionName])) {
|
||||
const s = node.getText();
|
||||
const replacement = new Lint.Replacement(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`);
|
||||
const fix = new Lint.Fix('Unexternalitzed string', [replacement]);
|
||||
const fix = [
|
||||
Lint.Replacement.replaceFromTo(node.getStart(), node.getWidth(), `nls.localize('KEY-${s.substring(1, s.length - 1)}', ${s})`),
|
||||
];
|
||||
this.addFailure(this.createFailure(node.getStart(), node.getWidth(), `Unexternalized string found: ${node.getText()}`, fix));
|
||||
return;
|
||||
}
|
||||
@@ -154,10 +156,10 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
|
||||
}
|
||||
}
|
||||
}
|
||||
let messageArg: ts.Expression = callInfo.argIndex === this.messageIndex
|
||||
? callInfo.callExpression.arguments[this.messageIndex]
|
||||
: null;
|
||||
if (messageArg && messageArg !== node) {
|
||||
|
||||
const messageArg = callInfo.callExpression.arguments[this.messageIndex];
|
||||
|
||||
if (messageArg && messageArg.kind !== ts.SyntaxKind.StringLiteral) {
|
||||
this.addFailure(this.createFailure(
|
||||
messageArg.getStart(), messageArg.getWidth(),
|
||||
`Message argument to '${callInfo.callExpression.expression.getText()}' must be a string literal.`));
|
||||
@@ -167,6 +169,15 @@ class NoUnexternalizedStringsRuleWalker extends Lint.RuleWalker {
|
||||
|
||||
private recordKey(keyNode: ts.StringLiteral, messageNode: ts.Node) {
|
||||
let text = keyNode.getText();
|
||||
// We have an empty key
|
||||
if (text.match(/(['"]) *\1/)) {
|
||||
if (messageNode) {
|
||||
this.addFailureAtNode(keyNode, `Key is empty for message: ${messageNode.getText()}`);
|
||||
} else {
|
||||
this.addFailureAtNode(keyNode, `Key is empty.`);
|
||||
}
|
||||
return;
|
||||
}
|
||||
let occurrences: KeyMessagePair[] = this.usedKeys[text];
|
||||
if (!occurrences) {
|
||||
occurrences = [];
|
||||
|
||||
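The tslint rule changes above replace the removed Lint.Fix wrapper with Lint.Replacement objects passed directly as the fix, which is the tslint 5 style. A hedged sketch of that pattern; the walker subclass, the check and the message text are assumptions for illustration only.

import * as ts from 'typescript';
import * as Lint from 'tslint';

class QuoteFixWalker extends Lint.RuleWalker {
	// Replace the surrounding double quotes of a string literal with single
	// quotes and attach the replacements directly as the fix.
	protected visitStringLiteral(node: ts.StringLiteral): void {
		const text = node.getText();
		if (text.charAt(0) === '"') {
			const fix = [
				Lint.Replacement.replaceFromTo(node.getStart(), node.getStart() + 1, '\''),
				Lint.Replacement.replaceFromTo(node.getEnd() - 1, node.getEnd(), '\''),
			];
			this.addFailureAtNode(node, 'Do not use double quotes for imports.', fix);
		}
		super.visitStringLiteral(node);
	}
}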
build/lib/typings/event-stream.d.ts (vendored, 9 changes)
@@ -1,7 +1,14 @@
declare module "event-stream" {
	import { Stream } from 'stream';
-	import { ThroughStream } from 'through';
+	import { ThroughStream as _ThroughStream} from 'through';
	import { MapStream } from 'map-stream';
	import * as File from 'vinyl';

+	export interface ThroughStream extends _ThroughStream {
+		queue(data: File | null);
+		push(data: File | null);
+		paused: boolean;
+	}

-	function merge(streams: Stream[]): ThroughStream;
+	function merge(...streams: Stream[]): ThroughStream;
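The widened typings above exist so build code can push vinyl Files through an event-stream pipe and terminate it with null. A small usage sketch under that assumption:

import * as es from 'event-stream';
import * as File from 'vinyl';

// Queue a single in-memory file, then end the stream; the `File | null`
// signature in the local typings is what allows the final queue(null).
const out = es.through();

out.queue(new File({
	path: 'hello.txt',
	base: '.',
	contents: Buffer.from('hello world\n')
}));
out.queue(null); // end-of-stream marker for downstream consumers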
@@ -143,7 +143,7 @@ function loadSourcemaps() {
|
||||
cb(null, f);
|
||||
return;
|
||||
}
|
||||
f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
|
||||
f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
|
||||
fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', function (err, contents) {
|
||||
if (err) {
|
||||
return cb(err);
|
||||
@@ -160,7 +160,7 @@ function stripSourceMappingURL() {
|
||||
var output = input
|
||||
.pipe(es.mapSync(function (f) {
|
||||
var contents = f.contents.toString('utf8');
|
||||
f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
|
||||
f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
|
||||
return f;
|
||||
}));
|
||||
return es.duplex(input, output);
|
||||
@@ -173,7 +173,6 @@ function rimraf(dir) {
|
||||
if (!err) {
|
||||
return cb();
|
||||
}
|
||||
;
|
||||
if (err.code === 'ENOTEMPTY' && ++retries < 5) {
|
||||
return setTimeout(function () { return retry(cb); }, 10);
|
||||
}
|
||||
|
||||
@@ -28,7 +28,7 @@ export interface IStreamProvider {
	(cancellationToken?: ICancellationToken): NodeJS.ReadWriteStream;
}

-export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation: boolean): NodeJS.ReadWriteStream {
+export function incremental(streamProvider: IStreamProvider, initial: NodeJS.ReadWriteStream, supportsCancellation?: boolean): NodeJS.ReadWriteStream {
	const input = es.through();
	const output = es.through();
	let state = 'idle';
@@ -129,7 +129,7 @@ export function skipDirectories(): NodeJS.ReadWriteStream {
	});
}

-export function cleanNodeModule(name: string, excludes: string[], includes: string[]): NodeJS.ReadWriteStream {
+export function cleanNodeModule(name: string, excludes: string[], includes?: string[]): NodeJS.ReadWriteStream {
	const toGlob = (path: string) => '**/node_modules/' + name + (path ? '/' + path : '');
	const negate = (str: string) => '!' + str;

@@ -190,7 +190,7 @@ export function loadSourcemaps(): NodeJS.ReadWriteStream {
				return;
			}

-			f.contents = new Buffer(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');
+			f.contents = Buffer.from(contents.replace(/\/\/# sourceMappingURL=(.*)$/g, ''), 'utf8');

			fs.readFile(path.join(path.dirname(f.path), lastMatch[1]), 'utf8', (err, contents) => {
				if (err) { return cb(err); }
@@ -209,7 +209,7 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
	const output = input
		.pipe(es.mapSync<VinylFile, VinylFile>(f => {
			const contents = (<Buffer>f.contents).toString('utf8');
-			f.contents = new Buffer(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
+			f.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, ''), 'utf8');
			return f;
		}));

@@ -223,7 +223,7 @@ export function rimraf(dir: string): (cb: any) => void {
		_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
			if (!err) {
				return cb();
-			};
+			}

			if (err.code === 'ENOTEMPTY' && ++retries < 5) {
				return setTimeout(() => retry(cb), 10);
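The rimraf wrapper above retries when deletion fails with ENOTEMPTY, which happens when another process still holds a handle inside the directory. A standalone sketch of the same loop, assuming the rimraf package the build already depends on:

import * as _rimraf from 'rimraf';

// Retry the delete a few times before giving up; each attempt waits 10ms.
export function rimrafWithRetry(dir: string): (cb: (err?: any) => void) => void {
	let retries = 0;

	const retry = (cb: (err?: any) => void) => {
		_rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
			if (!err) {
				return cb();
			}

			if (err.code === 'ENOTEMPTY' && ++retries < 5) {
				return setTimeout(() => retry(cb), 10);
			}

			return cb(err);
		});
	};

	return cb => retry(cb);
}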
build/lib/watch/.gitignore (vendored, new file)
@@ -0,0 +1 @@
+.yarnrc
@@ -9,7 +9,7 @@ const es = require('event-stream');
function handleDeletions() {
	return es.mapSync(f => {
		if (/\.ts$/.test(f.relative) && !f.contents) {
-			f.contents = new Buffer('');
+			f.contents = Buffer.from('');
			f.stat = { mtime: new Date() };
		}
@@ -30,12 +30,12 @@ function watch(root) {
|
||||
path: path,
|
||||
base: root
|
||||
});
|
||||
|
||||
//@ts-ignore
|
||||
file.event = type;
|
||||
result.emit('data', file);
|
||||
}
|
||||
|
||||
nsfw(root, function(events) {
|
||||
nsfw(root, function (events) {
|
||||
for (var i = 0; i < events.length; i++) {
|
||||
var e = events[i];
|
||||
var changeType = e.action;
|
||||
@@ -47,16 +47,16 @@ function watch(root) {
|
||||
handleEvent(path.join(e.directory, e.file), toChangeType(changeType));
|
||||
}
|
||||
}
|
||||
}).then(function(watcher) {
|
||||
}).then(function (watcher) {
|
||||
watcher.start();
|
||||
});
|
||||
});
|
||||
|
||||
return result;
|
||||
return result;
|
||||
}
|
||||
|
||||
var cache = Object.create(null);
|
||||
|
||||
module.exports = function(pattern, options) {
|
||||
module.exports = function (pattern, options) {
|
||||
options = options || {};
|
||||
|
||||
var cwd = path.normalize(options.cwd || process.cwd());
|
||||
@@ -66,7 +66,7 @@ module.exports = function(pattern, options) {
|
||||
watcher = cache[cwd] = watch(cwd);
|
||||
}
|
||||
|
||||
var rebase = !options.base ? es.through() : es.mapSync(function(f) {
|
||||
var rebase = !options.base ? es.through() : es.mapSync(function (f) {
|
||||
f.base = options.base;
|
||||
return f;
|
||||
});
|
||||
@@ -74,13 +74,13 @@ module.exports = function(pattern, options) {
|
||||
return watcher
|
||||
.pipe(filter(['**', '!.git{,/**}'])) // ignore all things git
|
||||
.pipe(filter(pattern))
|
||||
.pipe(es.map(function(file, cb) {
|
||||
fs.stat(file.path, function(err, stat) {
|
||||
.pipe(es.map(function (file, cb) {
|
||||
fs.stat(file.path, function (err, stat) {
|
||||
if (err && err.code === 'ENOENT') { return cb(null, file); }
|
||||
if (err) { return cb(); }
|
||||
if (!stat.isFile()) { return cb(); }
|
||||
|
||||
fs.readFile(file.path, function(err, contents) {
|
||||
fs.readFile(file.path, function (err, contents) {
|
||||
if (err && err.code === 'ENOENT') { return cb(null, file); }
|
||||
if (err) { return cb(); }
|
||||
|
||||
|
||||
@@ -24,7 +24,8 @@ function watch(root) {
|
||||
var result = es.through();
|
||||
var child = cp.spawn(watcherPath, [root]);
|
||||
|
||||
child.stdout.on('data', function(data) {
|
||||
child.stdout.on('data', function (data) {
|
||||
// @ts-ignore
|
||||
var lines = data.toString('utf8').split('\n');
|
||||
for (var i = 0; i < lines.length; i++) {
|
||||
var line = lines[i].trim();
|
||||
@@ -46,17 +47,17 @@ function watch(root) {
|
||||
path: changePathFull,
|
||||
base: root
|
||||
});
|
||||
|
||||
//@ts-ignore
|
||||
file.event = toChangeType(changeType);
|
||||
result.emit('data', file);
|
||||
}
|
||||
});
|
||||
|
||||
child.stderr.on('data', function(data) {
|
||||
child.stderr.on('data', function (data) {
|
||||
result.emit('error', data);
|
||||
});
|
||||
|
||||
child.on('exit', function(code) {
|
||||
child.on('exit', function (code) {
|
||||
result.emit('error', 'Watcher died with code ' + code);
|
||||
child = null;
|
||||
});
|
||||
@@ -70,7 +71,7 @@ function watch(root) {
|
||||
|
||||
var cache = Object.create(null);
|
||||
|
||||
module.exports = function(pattern, options) {
|
||||
module.exports = function (pattern, options) {
|
||||
options = options || {};
|
||||
|
||||
var cwd = path.normalize(options.cwd || process.cwd());
|
||||
|
||||
build/lib/watch/yarn.lock: 1302 lines (new file; diff suppressed because it is too large)
@@ -1,16 +1,6 @@
|
||||
THIRD-PARTY SOFTWARE NOTICES AND INFORMATION
|
||||
For Microsoft vscode-jsonrpc
|
||||
The Source EULA
|
||||
|
||||
This project incorporates material from the project(s) listed below (collectively, “Third Party Code”).
|
||||
Microsoft is not the original author of the Third Party Code. The original copyright notice and license
|
||||
under which Microsoft received such Third Party Code are set out below. This Third Party Code is licensed
|
||||
to you under their original license terms set forth below. Microsoft reserves all other rights not expressly
|
||||
granted, whether by implication, estoppel or otherwise.
|
||||
|
||||
1. DefinitelyTyped version 0.0.1 (https://github.com/borisyankov/DefinitelyTyped)
|
||||
|
||||
This project is licensed under the MIT license.
|
||||
Copyrights are respective of each contributor listed at the beginning of each definition file.
|
||||
Copyright (c) 2016 Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
@@ -19,13 +9,13 @@ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -32,7 +32,7 @@ END OF winjs NOTICES AND INFORMATION
|
||||
|
||||
%% string_scorer version 0.1.20 (https://github.com/joshaven/string_score)
|
||||
=========================================
|
||||
This software is released under the MIT license:
|
||||
This software is released under the Source EULA:
|
||||
|
||||
Copyright (c) Joshaven Potter
|
||||
|
||||
@@ -60,7 +60,7 @@ END OF string_scorer NOTICES AND INFORMATION
|
||||
|
||||
%% chjj-marked NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
The Source EULA
|
||||
|
||||
Copyright (c) 2011-2014, Christopher Jeffrey (https://github.com/chjj/)
|
||||
|
||||
|
||||
@@ -52,7 +52,7 @@ declare module monaco.editor {
|
||||
#include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
|
||||
#include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
|
||||
#include(vs/editor/common/services/webWorker): MonacoWebWorker, IWebWorkerOptions
|
||||
#include(vs/editor/standalone/browser/standaloneCodeEditor): IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
|
||||
#include(vs/editor/standalone/browser/standaloneCodeEditor): IActionDescriptor, IEditorConstructionOptions, IDiffEditorConstructionOptions, IStandaloneCodeEditor, IStandaloneDiffEditor
|
||||
export interface ICommandHandler {
|
||||
(...args:any[]): void;
|
||||
}
|
||||
@@ -62,19 +62,24 @@ export interface ICommandHandler {
|
||||
#include(vs/editor/standalone/browser/colorizer): IColorizerOptions, IColorizerElementOptions
|
||||
#include(vs/base/common/scrollable): ScrollbarVisibility
|
||||
#include(vs/platform/theme/common/themeService): ThemeColor
|
||||
#includeAll(vs/editor/common/editorCommon;IMode=>languages.IMode;LanguageIdentifier=>languages.LanguageIdentifier;editorOptions.=>): ISelection, IScrollEvent
|
||||
#includeAll(vs/editor/common/model;LanguageIdentifier=>languages.LanguageIdentifier): IScrollEvent
|
||||
#includeAll(vs/editor/common/editorCommon;editorOptions.=>): IScrollEvent
|
||||
#includeAll(vs/editor/common/model/textModelEvents):
|
||||
#includeAll(vs/editor/common/controller/cursorEvents):
|
||||
#includeAll(vs/editor/common/config/editorOptions):
|
||||
#includeAll(vs/editor/browser/editorBrowser;editorCommon.=>;editorOptions.=>):
|
||||
#include(vs/editor/common/config/fontInfo): FontInfo, BareFontInfo
|
||||
|
||||
//compatibility:
|
||||
export type IReadOnlyModel = ITextModel;
|
||||
export type IModel = ITextModel;
|
||||
}
|
||||
|
||||
declare module monaco.languages {
|
||||
|
||||
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData):
|
||||
#includeAll(vs/editor/standalone/browser/standaloneLanguages;modes.=>;editorCommon.=>editor.;model.=>editor.;IMarkerData=>editor.IMarkerData):
|
||||
#includeAll(vs/editor/common/modes/languageConfiguration):
|
||||
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.):
|
||||
#includeAll(vs/editor/common/modes;editorCommon.IRange=>IRange;editorCommon.IPosition=>IPosition;editorCommon.=>editor.;IMarkerData=>editor.IMarkerData;model.=>editor.):
|
||||
#include(vs/editor/common/services/modeService): ILanguageExtensionPoint
|
||||
#includeAll(vs/editor/standalone/common/monarch/monarchTypes):
|
||||
|
||||
|
||||
@@ -11,47 +11,5 @@
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://github.com/Microsoft/vscode/issues"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/minimist": "1.2.0",
|
||||
"@types/mocha": "2.2.39",
|
||||
"@types/semver": "5.3.30",
|
||||
"@types/sinon": "1.16.34",
|
||||
"debounce": "^1.0.0",
|
||||
"eslint": "^3.4.0",
|
||||
"event-stream": "^3.1.7",
|
||||
"ghooks": "1.0.3",
|
||||
"glob": "^5.0.13",
|
||||
"gulp": "^3.8.9",
|
||||
"gulp-bom": "^1.0.0",
|
||||
"gulp-concat": "^2.6.0",
|
||||
"gulp-cssnano": "^2.1.0",
|
||||
"gulp-filter": "^3.0.0",
|
||||
"gulp-flatmap": "^1.0.0",
|
||||
"gulp-rename": "^1.2.0",
|
||||
"gulp-sourcemaps": "^1.11.0",
|
||||
"gulp-tsb": "^2.0.3",
|
||||
"gulp-tslint": "^7.0.1",
|
||||
"gulp-uglify": "^2.0.0",
|
||||
"gulp-util": "^3.0.6",
|
||||
"gulp-watch": "^4.3.9",
|
||||
"is": "^3.1.0",
|
||||
"istanbul": "^0.3.17",
|
||||
"jsdom-no-contextify": "^3.1.0",
|
||||
"lazy.js": "^0.4.2",
|
||||
"minimatch": "^2.0.10",
|
||||
"mocha": "^2.2.5",
|
||||
"object-assign": "^4.0.1",
|
||||
"pump": "^1.0.1",
|
||||
"remap-istanbul": "^0.6.4",
|
||||
"rimraf": "^2.2.8",
|
||||
"sinon": "^1.17.2",
|
||||
"source-map": "^0.4.4",
|
||||
"tslint": "^4.3.1",
|
||||
"typescript": "2.5.2",
|
||||
"typescript-formatter": "4.0.1",
|
||||
"underscore": "^1.8.2",
|
||||
"vinyl": "^0.4.5",
|
||||
"vscode-nls-dev": "^2.0.1"
|
||||
}
|
||||
}
|
||||
|
||||
build/monaco/yarn.lock: 4668 lines (new file; diff suppressed because it is too large)
@@ -5,14 +5,15 @@

const cp = require('child_process');
const path = require('path');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
const fs = require('fs');
const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';

function npmInstall(location, opts) {
function yarnInstall(location, opts) {
	opts = opts || {};
	opts.cwd = location;
	opts.stdio = 'inherit';

	const result = cp.spawnSync(npm, ['install'], opts);
	const result = cp.spawnSync(yarn, ['install'], opts);

	if (result.error || result.status !== 0) {
		process.exit(1);
@@ -20,44 +21,45 @@ function npmInstall(location, opts) {
	}
}

// {{SQL CARBON EDIT}}
const protocol = [
	'jsonrpc',
	'types',
	'client'
];

protocol.forEach(item => npmInstall(`dataprotocol-node/${item}`));

// {{SQL CARBON EDIT}}
npmInstall('extensions-modules');
npmInstall('extensions'); // node modules shared by all extensions
yarnInstall('extensions'); // node modules shared by all extensions

const extensions = [
	'vscode-colorize-tests',
	'git',
	'json',
	'mssql',
	'mssql',
	'configuration-editing',
	'extension-editing',
	'markdown',
	'markdown-basics',
	'git',
	'merge-conflict',
	'insights-default',
	'account-provider-azure'
	'account-provider-azure',
	'agent'
];

extensions.forEach(extension => npmInstall(`extensions/${extension}`));
extensions.forEach(extension => yarnInstall(`extensions/${extension}`));

function npmInstallBuildDependencies() {
	// make sure we install gulp watch for the system installed
function yarnInstallBuildDependencies() {
	// make sure we install the deps of build/lib/watch for the system installed
	// node, since that is the driver of gulp
	//@ts-ignore
	const env = Object.assign({}, process.env);
	const watchPath = path.join(path.dirname(__dirname), 'lib', 'watch');
	const yarnrcPath = path.join(watchPath, '.yarnrc');

	delete env['npm_config_disturl'];
	delete env['npm_config_target'];
	delete env['npm_config_runtime'];
	const disturl = 'https://nodejs.org/download/release';
	const target = process.versions.node;
	const runtime = 'node';

	npmInstall(path.join(path.dirname(__dirname), 'lib', 'watch'), { env });
	const yarnrc = `disturl "${disturl}"
target "${target}"
runtime "${runtime}"`;

	fs.writeFileSync(yarnrcPath, yarnrc, 'utf8');
	yarnInstall(watchPath, { env });
}

npmInstall(`build`); // node modules required for build
npmInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
yarnInstall(`build`); // node modules required for build
yarnInstall('test/smoke'); // node modules required for smoketest
yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
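The postinstall change above writes a local .yarnrc into build/lib/watch so its native modules are built against the system Node (the one driving gulp) rather than Electron. A condensed sketch of that step, assuming the same directory layout as the script:

import * as fs from 'fs';
import * as path from 'path';

// Pin node-gyp to the official Node headers matching the node binary
// that runs gulp, instead of the Electron headers used for the app itself.
const watchPath = path.join(__dirname, 'lib', 'watch');
const yarnrc = [
	'disturl "https://nodejs.org/download/release"',
	`target "${process.versions.node}"`,
	'runtime "node"'
].join('\n');

fs.writeFileSync(path.join(watchPath, '.yarnrc'), yarnrc, 'utf8');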
@@ -3,13 +3,21 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

-if (process.env['npm_config_disturl'] !== 'https://atom.io/download/electron') {
-	console.error("You can't use plain npm to install Code's dependencies.");
-	console.error(
-		/^win/.test(process.platform)
-			? "Please run '.\\scripts\\npm.bat install' instead."
-			: "Please run './scripts/npm.sh install' instead."
-	);
+let err = false;
+
+const major = parseInt(/^(\d+)\./.exec(process.versions.node)[1]);
+
+if (major < 8) {
+	console.error('\033[1;31m*** Please use node>=8.\033[0;0m');
+	err = true;
+}
+
+if (!/yarn\.js$|yarnpkg$/.test(process.env['npm_execpath'])) {
+	console.error('\033[1;31m*** Please use yarn to install dependencies.\033[0;0m');
+	err = true;
+}
+
+if (err) {
+	console.error('');
	process.exit(1);
}
@@ -4,9 +4,11 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
const cp = require('child_process');
|
||||
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
|
||||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
|
||||
function updateGrammar(location) {
|
||||
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';
|
||||
const result = cp.spawnSync(npm, ['run', 'update-grammar'], {
|
||||
cwd: location,
|
||||
stdio: 'inherit'
|
||||
@@ -17,50 +19,17 @@ function updateGrammar(location) {
|
||||
}
|
||||
}
|
||||
|
||||
const extensions = [
|
||||
// 'bat' Grammar no longer available
|
||||
'clojure',
|
||||
'coffeescript',
|
||||
'cpp',
|
||||
'csharp',
|
||||
'css',
|
||||
'diff',
|
||||
'docker',
|
||||
'fsharp',
|
||||
'gitsyntax',
|
||||
'go',
|
||||
'groovy',
|
||||
'handlebars',
|
||||
'hlsl',
|
||||
'html',
|
||||
'ini',
|
||||
'java',
|
||||
// 'javascript', updated through JavaScript
|
||||
'json',
|
||||
'less',
|
||||
'lua',
|
||||
'make',
|
||||
'markdown',
|
||||
'objective-c',
|
||||
'perl',
|
||||
'php',
|
||||
// 'powershell', grammar not ready yet, @daviwil will ping when ready
|
||||
'pug',
|
||||
'python',
|
||||
'r',
|
||||
'razor',
|
||||
'ruby',
|
||||
'rust',
|
||||
'scss',
|
||||
'shaderlab',
|
||||
'shellscript',
|
||||
'sql',
|
||||
'swift',
|
||||
'typescript',
|
||||
'vb',
|
||||
'xml',
|
||||
'yaml'
|
||||
];
|
||||
const allExtensionFolders = fs.readdirSync('extensions');
|
||||
const extensions = allExtensionFolders.filter(e => {
|
||||
try {
|
||||
let packageJSON = JSON.parse(fs.readFileSync(path.join('extensions', e, 'package.json')).toString());
|
||||
return packageJSON && packageJSON.scripts && packageJSON.scripts['update-grammar'];
|
||||
} catch (e) {
|
||||
return false;
|
||||
}
|
||||
});
|
||||
|
||||
console.log(`Updating ${extensions.length} grammars...`);
|
||||
|
||||
extensions.forEach(extension => updateGrammar(`extensions/${extension}`));
|
||||
|
||||
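The update-grammars change above stops hard-coding the extension list and instead scans every folder under extensions/ for a package.json that defines an update-grammar script. A small sketch of that discovery step, assuming it runs from the repository root as the build script does:

import * as fs from 'fs';
import * as path from 'path';

// Collect every extension folder whose package.json declares an
// "update-grammar" npm script.
const extensions = fs.readdirSync('extensions').filter(name => {
	try {
		const manifest = JSON.parse(
			fs.readFileSync(path.join('extensions', name, 'package.json'), 'utf8')
		);
		return Boolean(manifest && manifest.scripts && manifest.scripts['update-grammar']);
	} catch (err) {
		// folders without a readable package.json are simply skipped
		return false;
	}
});

console.log(`Updating ${extensions.length} grammars...`);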
@@ -70,4 +39,5 @@ if (process.platform === 'win32') {
	cp.spawn('.\\scripts\\test-integration.bat', [], { env: process.env, stdio: 'inherit' });
} else {
	cp.spawn('/bin/bash', ['./scripts/test-integration.sh'], { env: process.env, stdio: 'inherit' });
}
}
@@ -14,14 +14,19 @@ var url = require('url');
|
||||
|
||||
function getOptions(urlString) {
|
||||
var _url = url.parse(urlString);
|
||||
var headers = {
|
||||
'User-Agent': 'VSCode'
|
||||
};
|
||||
var token = process.env['GITHUB_TOKEN'];
|
||||
if (token) {
|
||||
headers['Authorization'] = 'token ' + token
|
||||
}
|
||||
return {
|
||||
protocol: _url.protocol,
|
||||
host: _url.host,
|
||||
port: _url.port,
|
||||
path: _url.path,
|
||||
headers: {
|
||||
'User-Agent': 'NodeJS'
|
||||
}
|
||||
headers: headers
|
||||
};
|
||||
}
|
||||
|
||||
@@ -32,12 +37,16 @@ function download(url, redirectCount) {
|
||||
response.on('data', function (data) {
|
||||
content += data.toString();
|
||||
}).on('end', function () {
|
||||
if (response.statusCode === 403 && response.headers['x-ratelimit-remaining'] === '0') {
|
||||
e('GitHub API rate exceeded. Set GITHUB_TOKEN environment variable to increase rate limit.');
|
||||
return;
|
||||
}
|
||||
let count = redirectCount || 0;
|
||||
if (count < 5 && response.statusCode >= 300 && response.statusCode <= 303 || response.statusCode === 307) {
|
||||
let location = response.headers['location'];
|
||||
if (location) {
|
||||
console.log("Redirected " + url + " to " + location);
|
||||
download(location, count+1).then(c, e);
|
||||
download(location, count + 1).then(c, e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -59,17 +68,13 @@ function getCommitSha(repoId, repoPath) {
|
||||
commitDate: lastCommit.commit.author.date
|
||||
});
|
||||
} catch (e) {
|
||||
console.error("Failed extracting the SHA: " + content);
|
||||
return Promise.resolve(null);
|
||||
return Promise.reject(new Error("Failed extracting the SHA: " + content));
|
||||
}
|
||||
}, function () {
|
||||
console.error('Failed loading ' + commitInfo);
|
||||
return Promise.resolve(null);
|
||||
});
|
||||
}
|
||||
|
||||
exports.update = function (repoId, repoPath, dest, modifyGrammar) {
|
||||
var contentPath = 'https://raw.githubusercontent.com/' + repoId + '/master/' + repoPath;
|
||||
exports.update = function (repoId, repoPath, dest, modifyGrammar, version = 'master') {
|
||||
var contentPath = 'https://raw.githubusercontent.com/' + repoId + `/${version}/` + repoPath;
|
||||
console.log('Reading from ' + contentPath);
|
||||
return download(contentPath).then(function (content) {
|
||||
var ext = path.extname(repoPath);
|
||||
@@ -81,8 +86,7 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar) {
|
||||
} else if (ext === '.json') {
|
||||
grammar = JSON.parse(content);
|
||||
} else {
|
||||
console.error('Unknown file extension: ' + ext);
|
||||
return;
|
||||
return Promise.reject(new Error('Unknown file extension: ' + ext));
|
||||
}
|
||||
if (modifyGrammar) {
|
||||
modifyGrammar(grammar);
|
||||
@@ -99,8 +103,10 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar) {
|
||||
if (info) {
|
||||
result.version = 'https://github.com/' + repoId + '/commit/' + info.commitSha;
|
||||
}
|
||||
for (let key in grammar) {
|
||||
if (!result.hasOwnProperty(key)) {
|
||||
|
||||
let keys = ['name', 'scopeName', 'comment', 'injections', 'patterns', 'repository'];
|
||||
for (let key of keys) {
|
||||
if (grammar.hasOwnProperty(key)) {
|
||||
result[key] = grammar[key];
|
||||
}
|
||||
}
|
||||
@@ -113,11 +119,14 @@ exports.update = function (repoId, repoPath, dest, modifyGrammar) {
|
||||
console.log('Updated ' + path.basename(dest));
|
||||
}
|
||||
} catch (e) {
|
||||
console.error(e);
|
||||
return Promise.reject(e);
|
||||
}
|
||||
});
|
||||
|
||||
}, console.error);
|
||||
}, console.error).catch(e => {
|
||||
console.error(e);
|
||||
process.exit(1);
|
||||
});
|
||||
};
|
||||
|
||||
if (path.basename(process.argv[1]) === 'update-grammar.js') {
|
||||
|
||||
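The update-grammar change above adds an optional GITHUB_TOKEN so raw-content downloads are not throttled by the anonymous GitHub rate limit. A sketch of the request options it builds, using Node's built-in url module as the script does:

import * as url from 'url';

// Build https.request options, attaching a token-based Authorization
// header only when GITHUB_TOKEN is set in the environment.
function getOptions(urlString: string) {
	const parsed = url.parse(urlString);
	const headers: { [name: string]: string } = { 'User-Agent': 'VSCode' };

	const token = process.env['GITHUB_TOKEN'];
	if (token) {
		headers['Authorization'] = 'token ' + token;
	}

	return {
		protocol: parsed.protocol,
		host: parsed.host,
		port: parsed.port,
		path: parsed.path,
		headers
	};
}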
69
build/npm/update-localization-extension.js
Normal file
69
build/npm/update-localization-extension.js
Normal file
@@ -0,0 +1,69 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
let i18n = require("../lib/i18n");
|
||||
|
||||
let fs = require("fs");
|
||||
let path = require("path");
|
||||
let vfs = require("vinyl-fs");
|
||||
let rimraf = require('rimraf');
|
||||
|
||||
function update(idOrPath) {
|
||||
if (!idOrPath) {
|
||||
throw new Error('Argument must be the location of the localization extension.');
|
||||
}
|
||||
let locExtFolder = idOrPath;
|
||||
if (/^\w{2}(-\w+)?$/.test(idOrPath)) {
|
||||
locExtFolder = '../vscode-language-pack-' + idOrPath;
|
||||
}
|
||||
let locExtStat = fs.statSync(locExtFolder);
|
||||
if (!locExtStat || !locExtStat.isDirectory) {
|
||||
throw new Error('No directory found at ' + idOrPath);
|
||||
}
|
||||
let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
|
||||
let contributes = packageJSON['contributes'];
|
||||
if (!contributes) {
|
||||
throw new Error('The extension must define a "localizations" contribution in the "package.json"');
|
||||
}
|
||||
let localizations = contributes['localizations'];
|
||||
if (!localizations) {
|
||||
throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
|
||||
}
|
||||
|
||||
localizations.forEach(function (localization) {
|
||||
if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
|
||||
throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
|
||||
}
|
||||
let server = localization.server || 'www.transifex.com';
|
||||
let userName = localization.userName || 'api';
|
||||
let apiToken = process.env.TRANSIFEX_API_TOKEN;
|
||||
let languageId = localization.transifexId || localization.languageId;
|
||||
let translationDataFolder = path.join(locExtFolder, 'translations');
|
||||
|
||||
if (fs.existsSync(translationDataFolder) && fs.existsSync(path.join(translationDataFolder, 'main.i18n.json'))) {
|
||||
console.log('Clearing \'' + translationDataFolder + '\'...');
|
||||
rimraf.sync(translationDataFolder);
|
||||
}
|
||||
|
||||
console.log('Downloading translations for \'' + languageId + '\' to \'' + translationDataFolder + '\'...');
|
||||
const translationPaths = [];
|
||||
i18n.pullI18nPackFiles(server, userName, apiToken, { id: languageId }, translationPaths)
|
||||
.pipe(vfs.dest(translationDataFolder)).on('end', function () {
|
||||
localization.translations = [];
|
||||
for (let tp of translationPaths) {
|
||||
localization.translations.push({ id: tp.id, path: `./translations/${tp.resourceName}`});
|
||||
}
|
||||
fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
|
||||
});
|
||||
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
if (path.basename(process.argv[1]) === 'update-localization-extension.js') {
|
||||
update(process.argv[2]);
|
||||
}
|
||||
@@ -11,18 +11,19 @@
|
||||
"@types/xml2js": "0.0.33",
|
||||
"azure-storage": "^2.1.0",
|
||||
"decompress": "^4.2.0",
|
||||
"documentdb": "^1.11.0",
|
||||
"extensions-modules": "file:../extensions-modules",
|
||||
"documentdb": "1.13.0",
|
||||
"service-downloader": "github:anthonydresser/service-downloader#0.1.2",
|
||||
"fs-extra-promise": "^1.0.1",
|
||||
"mime": "^1.3.4",
|
||||
"minimist": "^1.2.0",
|
||||
"typescript": "2.5.2",
|
||||
"typescript": "2.6.1",
|
||||
"vscode": "^1.0.1",
|
||||
"xml2js": "^0.4.17"
|
||||
},
|
||||
"scripts": {
|
||||
"compile": "tsc",
|
||||
"watch": "tsc --watch",
|
||||
"postinstall": "npm run compile"
|
||||
"compile": "tsc -p tsconfig.build.json",
|
||||
"watch": "tsc -p tsconfig.build.json --watch",
|
||||
"postinstall": "npm run compile",
|
||||
"npmCheckJs": "tsc --noEmit"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6,6 +6,9 @@ if [ -n "$AGENT_WORKFOLDER" ]
then
	export npm_config_cache="$AGENT_WORKFOLDER/npm-cache"
	echo "Using npm cache: $npm_config_cache"
+
+	export YARN_CACHE_FOLDER="$AGENT_WORKFOLDER/yarn-cache"
+	echo "Using yarn cache: $YARN_CACHE_FOLDER"
fi

SUMMARY="Task;Duration"$'\n'
@@ -4,23 +4,15 @@
 *--------------------------------------------------------------------------------------------*/

const cp = require('child_process');
const npm = process.platform === 'win32' ? 'npm.cmd' : 'npm';

function npmInstall(package: string, args: string[]): void {
	const result = cp.spawnSync(npm, ['install', package, ...args], {
		stdio: 'inherit'
	});

	if (result.error || result.status !== 0) {
		process.exit(1);
	}
function yarnInstall(package: string): void {
	cp.execSync(`yarn add --no-lockfile ${package}`);
}

const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };
const [, , ...args] = process.argv;

Object.keys(dependencies).forEach(name => {
	const url = dependencies[name];
	npmInstall(url, args);
	yarnInstall(url);
});
@@ -4,12 +4,15 @@ set -e
# setup nvm
if [[ "$OSTYPE" == "darwin"* ]]; then
	export NVM_DIR=~/.nvm
-	source $(brew --prefix nvm)/nvm.sh
+	source $(brew --prefix nvm)/nvm.sh --no-use
else
-	source $NVM_DIR/nvm.sh
+	source $NVM_DIR/nvm.sh --no-use
fi

# install node
-NODE_VERSION=7.10.0
+NODE_VERSION=8.9.1
nvm install $NODE_VERSION
nvm use $NODE_VERSION
nvm use $NODE_VERSION

+# install yarn
+npm i -g yarn
@@ -14,8 +14,9 @@ import * as mime from 'mime';
|
||||
import * as minimist from 'minimist';
|
||||
import { DocumentClient, NewDocument } from 'documentdb';
|
||||
|
||||
if (process.argv.length < 6) {
|
||||
console.error('Usage: node publish.js <product> <platform> <type> <name> <version> <commit> <is_update> <file>');
|
||||
// {{SQL CARBON EDIT}}
|
||||
if (process.argv.length < 9) {
|
||||
console.error('Usage: node publish.js <product_quality> <platform> <file_type> <file_name> <version> <is_update> <file> [commit_id]');
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
@@ -69,6 +70,7 @@ interface Asset {
|
||||
hash: string;
|
||||
sha256hash: string;
|
||||
size: number;
|
||||
supportsFastUpdate?: boolean;
|
||||
}
|
||||
|
||||
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
|
||||
@@ -183,21 +185,10 @@ async function publish(commit: string, quality: string, platform: string, type:
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
// {{SQL CARBON EDIT}}
|
||||
await assertContainer(blobService, quality);
|
||||
|
||||
// mooncake is fussy and far away, this is needed!
|
||||
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
|
||||
await Promise.all([
|
||||
assertContainer(blobService, quality),
|
||||
assertContainer(mooncakeBlobService, quality)
|
||||
]);
|
||||
|
||||
const [blobExists, moooncakeBlobExists] = await Promise.all([
|
||||
doesAssetExist(blobService, quality, blobName),
|
||||
doesAssetExist(mooncakeBlobService, quality, blobName)
|
||||
]);
|
||||
const blobExists = await doesAssetExist(blobService, quality, blobName);
|
||||
|
||||
const promises = [];
|
||||
|
||||
@@ -205,10 +196,38 @@ async function publish(commit: string, quality: string, platform: string, type:
|
||||
promises.push(uploadBlob(blobService, quality, blobName, file));
|
||||
}
|
||||
|
||||
if (!moooncakeBlobExists) {
|
||||
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
|
||||
// {{SQL CARBON EDIT}}
|
||||
if (process.env['MOONCAKE_STORAGE_ACCESS_KEY']) {
|
||||
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
// mooncake is fussy and far away, this is needed!
|
||||
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
|
||||
await Promise.all([
|
||||
assertContainer(blobService, quality),
|
||||
assertContainer(mooncakeBlobService, quality)
|
||||
]);
|
||||
|
||||
const [blobExists, moooncakeBlobExists] = await Promise.all([
|
||||
doesAssetExist(blobService, quality, blobName),
|
||||
doesAssetExist(mooncakeBlobService, quality, blobName)
|
||||
]);
|
||||
|
||||
const promises = [];
|
||||
|
||||
if (!blobExists) {
|
||||
promises.push(uploadBlob(blobService, quality, blobName, file));
|
||||
}
|
||||
|
||||
if (!moooncakeBlobExists) {
|
||||
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
|
||||
}
|
||||
} else {
|
||||
console.log('Skipping Mooncake publishing.');
|
||||
}
|
||||
|
||||
|
||||
if (promises.length === 0) {
|
||||
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
||||
return;
|
||||
@@ -228,12 +247,20 @@ async function publish(commit: string, quality: string, platform: string, type:
|
||||
platform: platform,
|
||||
type: type,
|
||||
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
|
||||
mooncakeUrl: `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}`,
|
||||
// {{SQL CARBON EDIT}}
|
||||
mooncakeUrl: process.env['MOONCAKE_CDN_URL'] ? `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}` : undefined,
|
||||
hash: sha1hash,
|
||||
sha256hash,
|
||||
size
|
||||
};
|
||||
|
||||
// Remove this if we ever need to rollback fast updates for windows
|
||||
if (/win32/.test(platform)) {
|
||||
asset.supportsFastUpdate = true;
|
||||
}
|
||||
|
||||
console.log('Asset:', JSON.stringify(asset, null, ' '));
|
||||
|
||||
const release = {
|
||||
id: commit,
|
||||
timestamp: (new Date()).getTime(),
|
||||
@@ -261,8 +288,11 @@ function main(): void {
|
||||
boolean: ['upload-only']
|
||||
});
|
||||
|
||||
const [quality, platform, type, name, version, _isUpdate, file] = opts._;
|
||||
const commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
|
||||
// {{SQL CARBON EDIT}}
|
||||
let [quality, platform, type, name, version, _isUpdate, file, commit] = opts._;
|
||||
if (!commit) {
|
||||
commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
|
||||
}
|
||||
|
||||
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
|
||||
console.error(err);
|
||||
|
||||
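In publish.ts above, the Mooncake (Azure China) upload becomes optional: the secondary blob service is only created when MOONCAKE_STORAGE_ACCESS_KEY is present. A condensed sketch of that guard, assuming the azure-storage SDK and the assertContainer, doesAssetExist and uploadBlob helpers that the script defines elsewhere:

import * as azure from 'azure-storage';

declare function assertContainer(service: azure.BlobService, quality: string): Promise<void>;
declare function doesAssetExist(service: azure.BlobService, quality: string, name: string): Promise<boolean>;
declare function uploadBlob(service: azure.BlobService, quality: string, name: string, file: string): Promise<void>;

async function uploadToStores(storageAccount: string, quality: string, blobName: string, file: string): Promise<void> {
	const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
		.withFilter(new azure.ExponentialRetryPolicyFilter(20));

	await assertContainer(blobService, quality);

	const promises: Promise<void>[] = [];

	if (!await doesAssetExist(blobService, quality, blobName)) {
		promises.push(uploadBlob(blobService, quality, blobName, file));
	}

	// Mirror to the Azure China endpoint only when its key is configured.
	if (process.env['MOONCAKE_STORAGE_ACCESS_KEY']) {
		const mooncake = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
			.withFilter(new azure.ExponentialRetryPolicyFilter(20));

		// far-away endpoint, allow a generous request timeout
		mooncake.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;

		await assertContainer(mooncake, quality);

		if (!await doesAssetExist(mooncake, quality, blobName)) {
			promises.push(uploadBlob(mooncake, quality, blobName, file));
		}
	} else {
		console.log('Skipping Mooncake publishing.');
	}

	await Promise.all(promises);
}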
@@ -14,11 +14,14 @@ VSO_PAT="$6"
|
||||
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
|
||||
|
||||
step "Install dependencies" \
|
||||
npm install
|
||||
yarn
|
||||
|
||||
step "Hygiene" \
|
||||
npm run gulp -- hygiene
|
||||
|
||||
step "Monaco Editor Check" \
|
||||
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
|
||||
|
||||
step "Mix in repository from vscode-distro" \
|
||||
npm run gulp -- mixin
|
||||
|
||||
|
||||
@@ -3,10 +3,6 @@
|
||||
. ./scripts/env.sh
|
||||
. ./build/tfs/common/common.sh
|
||||
|
||||
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
|
||||
step "Install build dependencies" \
|
||||
npm i)
|
||||
|
||||
REPO=`pwd`
|
||||
ZIP=$REPO/../VSCode-darwin-selfsigned.zip
|
||||
UNSIGNEDZIP=$REPO/../VSCode-darwin-unsigned.zip
|
||||
|
||||
build/tfs/linux/.gitignore (vendored)
@@ -1 +1,2 @@
pat
+*.js
@@ -5,6 +5,7 @@
|
||||
. ./build/tfs/common/common.sh
|
||||
|
||||
export ARCH="$1"
|
||||
export npm_config_arch="$ARCH"
|
||||
export VSCODE_MIXIN_PASSWORD="$2"
|
||||
export AZURE_STORAGE_ACCESS_KEY="$3"
|
||||
export AZURE_STORAGE_ACCESS_KEY_2="$4"
|
||||
@@ -16,11 +17,14 @@ VSO_PAT="$8"
|
||||
echo "machine monacotools.visualstudio.com password $VSO_PAT" > ~/.netrc
|
||||
|
||||
step "Install dependencies" \
|
||||
npm install --arch=$ARCH --unsafe-perm
|
||||
yarn
|
||||
|
||||
step "Hygiene" \
|
||||
npm run gulp -- hygiene
|
||||
|
||||
step "Monaco Editor Check" \
|
||||
./node_modules/.bin/tsc -p ./src/tsconfig.monaco.json --noEmit
|
||||
|
||||
step "Mix in repository from vscode-distro" \
|
||||
npm run gulp -- mixin
|
||||
|
||||
@@ -28,7 +32,7 @@ step "Get Electron" \
|
||||
npm run gulp -- "electron-$ARCH"
|
||||
|
||||
step "Install distro dependencies" \
|
||||
node build/tfs/common/installDistro.js --arch=$ARCH
|
||||
node build/tfs/common/installDistro.js
|
||||
|
||||
step "Build minified" \
|
||||
npm run gulp -- "vscode-linux-$ARCH-min"
|
||||
|
||||
build/tfs/linux/frozen-check.ts (new file, 42 lines)
@@ -0,0 +1,42 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { DocumentClient } from 'documentdb';

interface Config {
	id: string;
	frozen: boolean;
}

function createDefaultConfig(quality: string): Config {
	return {
		id: quality,
		frozen: false
	};
}

function getConfig(quality: string): Promise<Config> {
	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const collection = 'dbs/builds/colls/config';
	const query = {
		query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
		parameters: [
			{ name: '@quality', value: quality }
		]
	};

	return new Promise<Config>((c, e) => {
		client.queryDocuments(collection, query).toArray((err, results) => {
			if (err && err.code !== 409) { return e(err); }

			c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0] as any as Config);
		});
	});
}

getConfig(process.argv[2])
	.then(c => console.log(c.frozen), e => console.error(e));
build/tfs/linux/new_package.json.template (new file, 6 lines)
@@ -0,0 +1,6 @@
{
	"name": "PACKAGENAME",
	"version": "PACKAGEVERSION",
	"repositoryId": "REPOSITORYID",
	"sourceUrl": "PACKAGEURL"
}
@@ -12,10 +12,6 @@ step "Build RPM package" \
|
||||
# step "Build snap package" \
|
||||
# npm run gulp -- "vscode-linux-$ARCH-build-snap"
|
||||
|
||||
(cd $BUILD_SOURCESDIRECTORY/build/tfs/common && \
|
||||
step "Install build dependencies" \
|
||||
npm install --unsafe-perm)
|
||||
|
||||
# Variables
|
||||
PLATFORM_LINUX="linux-$ARCH"
|
||||
PLATFORM_DEB="linux-deb-$ARCH"
|
||||
@@ -55,36 +51,29 @@ step "Publish RPM package" \
|
||||
# SNAP_FILENAME="$(ls $REPO/.build/linux/snap/$ARCH/ | grep .snap)"
|
||||
# SNAP_PATH="$REPO/.build/linux/snap/$ARCH/$SNAP_FILENAME"
|
||||
|
||||
IS_FROZEN="$(node build/tfs/linux/frozen-check.js $VSCODE_QUALITY)"
|
||||
|
||||
if [ -z "$VSCODE_QUALITY" ]; then
|
||||
echo "VSCODE_QUALITY is not set, skipping repo package publish"
|
||||
elif [ "$IS_FROZEN" = "true" ]; then
|
||||
echo "$VSCODE_QUALITY is frozen, skipping repo package publish"
|
||||
else
|
||||
if [ "$BUILD_SOURCEBRANCH" = "master" ] || [ "$BUILD_SOURCEBRANCH" = "refs/heads/master" ]; then
|
||||
if [[ $BUILD_QUEUEDBY = *"Project Collection Service Accounts"* || $BUILD_QUEUEDBY = *"Microsoft.VisualStudio.Services.TFS"* ]]; then
|
||||
# Get necessary information
|
||||
pushd $REPO && COMMIT_HASH=$(git rev-parse HEAD) && popd
|
||||
PACKAGE_NAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/_.*//g')"
|
||||
DEB_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$DEB_FILENAME"
|
||||
RPM_URL="https://az764295.vo.msecnd.net/$VSCODE_QUALITY/$COMMIT_HASH/$RPM_FILENAME"
|
||||
PACKAGE_VERSION="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/ | sed -e 's/code-[a-z]*_//g' -e 's/\_.*$//g')"
|
||||
# Write config files needed by API, use eval to force environment variable expansion
|
||||
DIRNAME=$(dirname $(readlink -f $0))
|
||||
pushd $DIRNAME
|
||||
# Submit to apt repo
|
||||
if [ "$DEB_ARCH" = "amd64" ]; then
|
||||
eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > apt-config.json
|
||||
eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4adf642421134a1a48d1a\", \"sourceUrl\": \"$DEB_URL\" }' > apt-addpkg.json
|
||||
echo "Submitting apt-addpkg.json:"
|
||||
cat apt-addpkg.json
|
||||
|
||||
step "Publish to repositories" \
|
||||
./repoapi_client.sh -config apt-config.json -addpkg apt-addpkg.json
|
||||
./repoapi_client.sh -config apt-config.json -addfile $DEB_PATH
|
||||
fi
|
||||
# Submit to yum repo (disabled as it's manual until signing is automated)
|
||||
# eval echo '{ \"server\": \"azure-apt-cat.cloudapp.net\", \"protocol\": \"https\", \"port\": \"443\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"username\": \"$LINUX_REPO_USERNAME\", \"password\": \"$LINUX_REPO_PASSWORD\" }' > yum-config.json
|
||||
# eval echo '{ \"name\": \"$PACKAGE_NAME\", \"version\": \"$PACKAGE_VERSION\", \"repositoryId\": \"58a4ae3542421134a1a48d1b\", \"sourceUrl\": \"$RPM_URL\" }' > yum-addpkg.json
|
||||
# echo "Submitting yum-addpkg.json:"
|
||||
# cat yum-addpkg.json
|
||||
# ./repoapi_client.sh -config yum-config.json -addpkg yum-addpkg.json
|
||||
|
||||
# ./repoapi_client.sh -config yum-config.json -addfile $RPM_PATH
|
||||
popd
|
||||
echo "To check repo publish status run ./repoapi_client.sh -config config.json -check <id>"
|
||||
fi
|
||||
|
||||
@@ -2,9 +2,9 @@
# This is a VERY basic script for Create/Delete operations on repos and packages
#
cmd=$1
urls=urls.txt
defaultPackageFile=new_package.json
defaultRepoFile=new_repo.json
docDir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" # chrmarti: Changed to script's directory.
packageJsonTemplate=$docDir/new_package.json.template
repoJsonTemplate=$docDir/new_repo.json.template

function Bail
{
@@ -24,14 +24,21 @@ function Usage {
    echo "$0 -config FILENAME -listrepos | -listpkgs | -addrepo FILENAME | -addpkg FILENAME |"
    echo "-addpkgs FILENAME | -check ID | -delrepo REPOID | -delpkg PKGID"
    echo -e "\t-config FILENAME : JSON file containing API server name and creds"
    echo -e "\t-listrepos : List repositories"
    echo -e "Package Operations:"
    echo -e "\t-listpkgs [REGEX] : List packages, optionally filter by REGEX"
    echo -e "\t-addrepo FILENAME : Create a new repo using the specified JSON file"
    echo -e "\t-addpkg FILENAME : Add package to repo using the specified JSON file"
    echo -e "\t-addpkgs FILENAME : Add packages to repo using urls contained in FILENAME"
    echo -e "\t-check ID : Check upload operation by ID"
    echo -e "\t-delrepo REPOID : Delete the specified repo by ID"
    echo -e "\t-delpkg PKGID : Delete the specified package by ID"
    echo -e "File Operations:"
    echo -e "\t-uploadfile FILENAME: Upload FILENAME (does not publish) "
    echo -e "\t-addfile FILENAME : Upload FILENAME AND publish to the repo"
    echo -e "\t-listfiles : List uploaded files"
    echo -e "\t-delfile FILEID : Delete uploaded file by ID"
    echo -e "Repository Operations:"
    echo -e "\t-listrepos : List repositories"
    echo -e "\t-addrepo FILENAME : Create a new repo using the specified JSON file"
    echo -e "\t-delrepo REPOID : Delete the specified repo by ID"
    exit 1
}

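The usage text above maps directly onto the functions that follow. A few sample invocations, based purely on that usage text (the config file name, package file and ID are placeholders):

    ./repoapi_client.sh -config apt-config.json -listrepos
    ./repoapi_client.sh -config apt-config.json -listpkgs "code-insiders"
    ./repoapi_client.sh -config apt-config.json -addfile ./code-insiders_1.19.0_amd64.deb
    ./repoapi_client.sh -config apt-config.json -check <id>
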
@@ -84,33 +91,136 @@ function AddRepo
{
    repoFile=$1
    if [ -z $repoFile ]; then
        Bail "Error: Must specify a JSON-formatted file. Reference $defaultRepoFile.template"
        Bail "Error: Must specify a JSON-formatted file. Reference $repoJsonTemplate"
    fi
    if [ ! -f $repoFile ]; then
        Bail "Error: Cannot create repo - $repoFile does not exist"
    fi
    packageUrl=$(grep "url" $repoFile | head -n 1 | awk '{print $2}' | tr -d ',')
    echo "Creating new repo on $server [$packageUrl]"
    curl -i -k "$baseurl/v1/repositories" --data @./$repoFile -H "Content-Type: application/json"
    curl -i -k "$baseurl/v1/repositories" --data @$repoFile -H "Content-Type: application/json"
    echo ""
}

# Upload AND publish the file
function AddFile
{
    packageFile=$1
    # Validity checks are performed by UploadFile
    echo "Uploading package to $server [$packageFile]"
    response=$(UploadFile $packageFile "true")
    id=$(echo $response | jq -r ".id")

    # Parse package metadata first to confirm it's a valid deb/rpm
    # Needs to be performed in this function so we can use it to publish the package
    jsonFile=$(WritePackageInfoToFile $packageFile)

    sed -i "s/REPOSITORYID/$repositoryId/g" $jsonFile
    # Replace the url field with fileId
    sed -i "s/PACKAGEURL/$id/g" $jsonFile
    sed -i "s/sourceUrl/fileId/g" $jsonFile

    AddPackage $jsonFile
    rm -f $jsonFile
    echo ""
}

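Put differently, AddFile first uploads the binary, then publishes it by pointing the package record at the uploaded file id rather than an external URL. The new_package.json.template file is not shown in this diff, but judging from the placeholders the sed calls target, the rewrite goes roughly like this (all values are made up):

    # JSON emitted by WritePackageInfoToFile, before the rewrites:
    #   { "name": "code-insiders", "version": "1.19.0", "repositoryId": "REPOSITORYID", "sourceUrl": "PACKAGEURL" }
    # after the three sed calls (id comes from the upload response, repositoryId from the loaded config):
    #   { "name": "code-insiders", "version": "1.19.0", "repositoryId": "<repository-id>", "fileId": "<uploaded-file-id>" }
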
# Upload a file
function UploadFile
{
    packageFile=$1
    quick=$2
    if [ -z $packageFile ]; then
        Bail "Error: Must specify the path to a file to upload "
    fi
    if [ ! -f $packageFile ]; then
        Bail "Error: Cannot upload - $packageFile does not exist"
    fi

    # Additional validation and output if quick mode isn't enabled
    # Basically, if this is part of a publish operation, these steps are handled elsewhere
    if [ "$quick" != "true" ]; then
        # Parse package metadata first to confirm it's a valid deb/rpm
        jsonFile=$(WritePackageInfoToFile $packageFile)
        rm -f $jsonFile

        echo "Uploading package to $server [$packageFile]"
    fi
    curl -s -k -X POST -F file=@$packageFile "$baseurl/v1/files"
    echo ""
}

function ListFiles
{
    curl -s -k "$baseurl/v1/files" | jq
}

function DeleteFile
{
    fileId=$1
    if [ -z "$fileId" ]; then
        Bail "Error: Must specify an ID to delete"
    fi
    curl -s -X DELETE "$baseurl/v1/files/$fileId"
}

# Upload a single package using the specified JSON file
function AddPackage
{
    packageFile=$1
    if [ -z $packageFile ]; then
        Bail "Error: Must specify a JSON-formatted file. Reference $defaultPackageFile.template"
        Bail "Error: Must specify a JSON-formatted file. Reference $packageJsonTemplate"
    fi
    if [ ! -f $packageFile ]; then
        Bail "Error: Cannot add package - $packageFile does not exist"
    fi
    packageUrl=$(grep "sourceUrl" $packageFile | head -n 1 | awk '{print $2}')
    echo "Adding package to $server [$packageUrl]"
    curl -i -k "$baseurl/v1/packages" --data @./$packageFile -H "Content-Type: application/json"
    curl -i -k "$baseurl/v1/packages" --data @$packageFile -H "Content-Type: application/json"
    echo ""
}

# Gets the package name and version and writes it to a file
function WritePackageInfoToFile
{
    packageFile=$1
    tmpOut=$(mktemp)
    if [ -z "$packageFile" ]; then
        Bail "Error: Must specify path to a deb/rpm package"
    elif [ ! -f "$packageFile" ]; then
        Bail "Error: Specified file $packageFile does not exist"
    fi
    if dpkg -I $packageFile > $tmpOut 2> /dev/null; then
        >&2 echo "File is deb format"
        pkgName=$(grep "^\s*Package:" $tmpOut | awk '{print $2}')
        pkgVer=$(grep "^\s*Version:" $tmpOut | awk '{print $2}')
    elif rpm -qpi $packageFile > $tmpOut 2> /dev/null; then
        >&2 echo "File is rpm format"
        pkgName=$(egrep "^Name" $tmpOut | tr -d ':' | awk '{print $2}')
        pkgVer=$(egrep "^Version" $tmpOut | tr -d ':' | awk '{print $2}')
    else
        rm -f $tmpOut
        Bail "File is not a valid deb/rpm package $url"
    fi

    rm -f $tmpOut
    if [ -z "$pkgName" ]; then
        Bail "Unable to parse package name for $url"
    elif [ -z "$pkgVer" ]; then
        Bail "Unable to parse package version number for $url"
    fi

    # Create Package .json file
    outJson=$(mktemp)
    escapedUrl=$(echo "$url" | sed 's/\//\\\//g' | sed 's/\&/\\\&/g')
    cp $packageJsonTemplate $outJson
    sed -i "s/PACKAGENAME/$pkgName/g" $outJson
    sed -i "s/PACKAGEVERSION/$pkgVer/g" $outJson

    # Return path to json file
    echo $outJson
}

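The greps above key off the standard metadata listings printed by dpkg and rpm. Abbreviated, illustrative output (package names and versions are placeholders):

    # dpkg -I pkg.deb   prints lines such as " Package: code-insiders" and " Version: 1.19.0"
    # rpm -qpi pkg.rpm  prints lines such as "Name        : code-insiders" and "Version     : 1.19.0"
    dpkg -I pkg.deb  | grep "^\s*Package:" | awk '{print $2}'              # -> code-insiders
    rpm -qpi pkg.rpm | egrep "^Version" | tr -d ':' | awk '{print $2}'     # -> 1.19.0
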
# Upload a single package by dynamically creating a JSON file using a provided URL
function AddPackageByUrl
{
@@ -119,41 +229,20 @@ function AddPackageByUrl
        Bail "Unable to publish package because no URL was specified"
    fi
    tmpFile=$(mktemp)
    tmpOut=$(mktemp)
    if ! wget -q "$url" -O $tmpFile; then
        rm -f $tmpFile $tmpFile
        rm -f $tmpFile
        Bail "Unable to download URL $url"
    elif dpkg -I $tmpFile > $tmpOut 2> /dev/null; then
        echo "File is deb format"
        pkgName=$(grep "^\s*Package:" $tmpOut | awk '{print $2}')
        pkgVer=$(grep "^\s*Version:" $tmpOut | awk '{print $2}')
    elif rpm -qpi $tmpFile > $tmpOut 2> /dev/null; then
        echo "File is rpm format"
        pkgName=$(egrep "^Name" $tmpOut | tr -d ':' | awk '{print $2}')
        pkgVer=$(egrep "^Version" $tmpOut | tr -d ':' | awk '{print $2}')
    else
        rm -f $tmpFile $tmpOut
        Bail "File is not a valid deb/rpm package $url"
    fi

    rm -f $tmpFile $tmpOut
    if [ -z "$pkgName" ]; then
        Bail "Unable to parse package name for $url"
    elif [ -z "$pkgVer" ]; then
        Bail "Unable to parse package version number for $url"
    fi

    jsonFile=$(WritePackageInfoToFile $tmpFile)
    # Create Package .json file
    escapedUrl=$(echo "$url" | sed 's/\//\\\//g' | sed 's/\&/\\\&/g')
    cp $defaultPackageFile.template $defaultPackageFile
    sed -i "s/PACKAGENAME/$pkgName/g" $defaultPackageFile
    sed -i "s/PACKAGEVERSION/$pkgVer/g" $defaultPackageFile
    sed -i "s/PACKAGEURL/$escapedUrl/g" $defaultPackageFile
    sed -i "s/REPOSITORYID/$repositoryId/g" $defaultPackageFile
    sed -i "s/PACKAGEURL/$escapedUrl/g" $jsonFile
    sed -i "s/REPOSITORYID/$repositoryId/g" $jsonFile
    # Perform Upload
    AddPackage $defaultPackageFile
    AddPackage $jsonFile
    # Cleanup
    rm -f $defaultPackageFile
    rm -f $jsonFile
}

# Upload multiple packages by reading urls line-by-line from the specified file
@@ -180,7 +269,7 @@ function CheckUpload {
    if [ -z "$id" ]; then
        Bail "Must specify an ID"
    fi
    curl -k $baseurl/v1/packages/queue/$id
    curl -s -k $baseurl/v1/packages/queue/$id | jq
    echo ""
}

@@ -232,6 +321,20 @@ while (( "$#" )); do
        operation=AddPackages
        shift
        operand="$1"
    elif [[ "$1" == "-addfile" ]]; then
        operation=AddFile
        shift
        operand="$1"
    elif [[ "$1" == "-uploadfile" ]]; then
        operation=UploadFile
        shift
        operand="$1"
    elif [[ "$1" == "-listfiles" ]]; then
        operation=ListFiles
    elif [[ "$1" == "-delfile" ]]; then
        operation=DeleteFile
        shift
        operand="$1"
    elif [[ "$1" == "-check" ]]; then
        operation=CheckUpload
        shift

@@ -14,15 +14,20 @@ Param(

# Set the right architecture
$env:npm_config_arch="$arch"
$env:CHILD_CONCURRENCY="1"

step "Install dependencies" {
    exec { & npm install }
    exec { & yarn }
}

step "Hygiene" {
    exec { & npm run gulp -- hygiene }
}

step "Monaco Editor Check" {
    exec { & .\node_modules\.bin\tsc -p .\src\tsconfig.monaco.json --noEmit }
}

$env:VSCODE_MIXIN_PASSWORD = $mixinPassword
step "Mix in repository from vscode-distro" {
    exec { & npm run gulp -- mixin }
@@ -40,6 +45,10 @@ step "Build minified" {
    exec { & npm run gulp -- "vscode-win32-$global:arch-min" }
}

step "Copy Inno updater" {
    exec { & npm run gulp -- "vscode-win32-$global:arch-copy-inno-updater" }
}

# step "Create loader snapshot" {
#   exec { & node build\lib\snapshotLoader.js --arch=$global:arch }
# }

@@ -17,9 +17,10 @@ Param(

# Set the right architecture
$env:npm_config_arch="$arch"
$env:CHILD_CONCURRENCY="1"

step "Install dependencies" {
    exec { & npm install }
    exec { & yarn }
}

step "Hygiene" {

@@ -6,6 +6,7 @@ $env:HOME=$env:USERPROFILE
if (Test-Path env:AGENT_WORKFOLDER) {
    $env:HOME="${env:AGENT_WORKFOLDER}\home"
    $env:npm_config_cache="${env:HOME}\npm-cache"
    $env:YARN_CACHE_FOLDER="${env:HOME}\yarn-cache"
    $env:npm_config_devdir="${env:HOME}\npm-devdir"
    New-Item -Path "$env:HOME" -Type directory -Force | out-null
    New-Item -Path "$env:npm_config_cache" -Type directory -Force | out-null

@@ -1,6 +1,7 @@
# install node
$env:Path = $env:NVM_HOME + ";" + $env:NVM_SYMLINK + ";" + $env:Path
$NodeVersion = "7.10.0"
nvm install $NodeVersion
nvm use $NodeVersion
$NodeVersion = "8.9.1"
# nvm install $NodeVersion
# nvm use $NodeVersion
# npm install -g yarn
$env:Path = $env:NVM_HOME + "\v" + $NodeVersion + ";" + $env:Path
build/tsconfig.build.json (new file, 7 lines)
@@ -0,0 +1,7 @@
{
    "extends": "./tsconfig.json",
    "compilerOptions": {
        "allowJs": false,
        "checkJs": false
    }
}
@@ -7,7 +7,12 @@
        "preserveConstEnums": true,
        "sourceMap": false,
        "experimentalDecorators": true,
        "newLine": "LF"
        "newLine": "LF",
        // enable JavaScript type checking for the language service
        // use the tsconfig.build.json for compiling, which disables JavaScript
        // type checking so that JavaScript files are not transpiled
        "allowJs": true,
        "checkJs": true
    },
    "exclude": [
        "node_modules/**"

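That comment is the intent of the new build/tsconfig.build.json: the language service keeps type-checking the plain-JS build scripts through the tsconfig.json it extends, while compilation can go through the stricter config so .js files are neither checked nor transpiled. One way to exercise the two configs by hand, not necessarily how the repo's own gulp tasks invoke the compiler:

    node_modules/.bin/tsc -p build/tsconfig.json --noEmit          # includes and type-checks .js files
    node_modules/.bin/tsc -p build/tsconfig.build.json --noEmit    # TypeScript only; allowJs/checkJs are off
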
@@ -2,12 +2,13 @@
    "rules": {
        "no-unused-expression": true,
        "no-duplicate-variable": true,
        "no-unused-variable": true,
        "curly": true,
        "class-name": true,
        "semicolon": [
            true,
            "always"
        ],
        "triple-equals": true
    }
    },
    "defaultSeverity": "warning"
}
build/win32/OSSREADME.json (1794 lines; file diff suppressed because it is too large)
@@ -19,7 +19,7 @@ OutputDir={#OutputDir}
OutputBaseFilename=SqlOpsStudioSetup
Compression=lzma
SolidCompression=yes
AppMutex={#AppMutex}
AppMutex={code:GetAppMutex}
SetupMutex={#AppMutex}setup
WizardImageFile={#RepoDir}\resources\win32\inno-big.bmp
WizardSmallImageFile={#RepoDir}\resources\win32\inno-small.bmp
@@ -48,11 +48,16 @@ Name: "simplifiedChinese"; MessagesFile: "{#RepoDir}\build\win32\i18n\Default.zh
Name: "traditionalChinese"; MessagesFile: "{#RepoDir}\build\win32\i18n\Default.zh-tw.isl,{#RepoDir}\build\win32\i18n\messages.zh-tw.isl" {#LocalizedLanguageFile("cht")}

[InstallDelete]
Type: filesandordirs; Name: {app}\resources\app\out
Type: filesandordirs; Name: {app}\resources\app\plugins
Type: filesandordirs; Name: {app}\resources\app\extensions
Type: filesandordirs; Name: {app}\resources\app\node_modules
Type: files; Name: {app}\resources\app\Credits_45.0.2454.85.html
Type: filesandordirs; Name: "{app}\resources\app\out"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\plugins"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\extensions"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\node_modules"; Check: IsNotUpdate
Type: filesandordirs; Name: "{app}\resources\app\node_modules.asar.unpacked"; Check: IsNotUpdate
Type: files; Name: "{app}\resources\app\node_modules.asar"; Check: IsNotUpdate
Type: files; Name: "{app}\resources\app\Credits_45.0.2454.85.html"; Check: IsNotUpdate

[UninstallDelete]
Type: filesandordirs; Name: "{app}\_"

[Tasks]
Name: "quicklaunchicon"; Description: "{cm:CreateQuickLaunchIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked; OnlyBelowVersion: 0,6.1
@@ -68,14 +73,13 @@ Name: "{commondesktop}\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; AppUs
Name: "{userappdata}\Microsoft\Internet Explorer\Quick Launch\{#NameLong}"; Filename: "{app}\{#ExeBasename}.exe"; Tasks: quicklaunchicon; AppUserModelID: "{#AppUserId}"

[Run]
Filename: "{app}\{#ExeBasename}.exe"; Description: "{cm:LaunchProgram,{#NameLong}}"; Tasks: runcode; Flags: nowait postinstall; Check: WizardSilent
Filename: "{app}\{#ExeBasename}.exe"; Description: "{cm:LaunchProgram,{#NameLong}}"; Tasks: runcode; Flags: nowait postinstall; Check: ShouldRunAfterUpdate
Filename: "{app}\{#ExeBasename}.exe"; Description: "{cm:LaunchProgram,{#NameLong}}"; Flags: nowait postinstall; Check: WizardNotSilent

[Registry]
Root: HKCR; Subkey: "{#RegValueName}SourceFile"; ValueType: string; ValueName: ""; ValueData: "{cm:SourceFile,{#NameLong}}"; Flags: uninsdeletekey
Root: HKCR; Subkey: "{#RegValueName}SourceFile\DefaultIcon"; ValueType: string; ValueName: ""; ValueData: "{app}\resources\app\resources\win32\code_file.ico"
Root: HKCR; Subkey: "{#RegValueName}SourceFile\shell\open\command"; ValueType: string; ValueName: ""; ValueData: """{app}\{#ExeBasename}.exe"" ""%1"""

Root: HKCU; Subkey: "Environment"; ValueType: expandsz; ValueName: "Path"; ValueData: "{olddata};{app}\bin"; Tasks: addtopath; Check: NeedsAddPath(ExpandConstant('{app}\bin'))

[Code]
@@ -112,6 +116,75 @@ begin
    Result := not WizardSilent();
end;

// Updates
function IsBackgroundUpdate(): Boolean;
begin
    Result := ExpandConstant('{param:update|false}') <> 'false';
end;

function IsNotUpdate(): Boolean;
begin
    Result := not IsBackgroundUpdate();
end;

// SqlOps will create a flag file before the update starts (/update=C:\foo\bar)
// - if the file exists at this point, the user quit SqlOps before the update finished, so don't start SqlOps after update
// - otherwise, the user has accepted to apply the update and SqlOps should start
function LockFileExists(): Boolean;
begin
    Result := FileExists(ExpandConstant('{param:update}'))
end;

function ShouldRunAfterUpdate(): Boolean;
begin
    if IsBackgroundUpdate() then
        Result := not LockFileExists()
    else
        Result := True;
end;

function GetAppMutex(Value: string): string;
begin
    if IsBackgroundUpdate() then
        Result := ''
    else
        Result := '{#AppMutex}';
end;

function GetDestDir(Value: string): string;
begin
    if IsBackgroundUpdate() then
        Result := ExpandConstant('{app}\_')
    else
        Result := ExpandConstant('{app}');
end;

function BoolToStr(Value: Boolean): String;
begin
    if Value then
        Result := 'true'
    else
        Result := 'false';
end;

procedure CurStepChanged(CurStep: TSetupStep);
var
    UpdateResultCode: Integer;
begin
    if IsBackgroundUpdate() and (CurStep = ssPostInstall) then
    begin
        CreateMutex('{#AppMutex}-ready');

        while (CheckForMutexes('{#AppMutex}')) do
        begin
            Log('Application is still running, waiting');
            Sleep(1000);
        end;

        Exec(ExpandConstant('{app}\tools\inno_updater.exe'), ExpandConstant('"{app}\{#ExeBasename}.exe" ' + BoolToStr(LockFileExists())), '', SW_SHOW, ewWaitUntilTerminated, UpdateResultCode);
    end;
end;

// http://stackoverflow.com/a/23838239/261019
procedure Explode(var Dest: TArrayOfString; Text: String; Separator: String);
var

@@ -2,7 +2,7 @@
AddContextMenuFiles=エクスプローラーのファイル コンテキスト メニューに [%1 で開く] アクションを追加する
AddContextMenuFolders=エクスプローラーのディレクトリ コンテキスト メニューに [%1 で開く] アクションを追加する
AssociateWithFiles=サポートされているファイルの種類のエディターとして、%1 を登録する
AddToPath=PATH への追加 (再起動後に使用可能になる)
AddToPath=PATH への追加(再起動後に使用可能)
RunAfter=インストール後に %1 を実行する
Other=その他:
SourceFile=%1 ソース ファイル
build/win32/inno_updater.exe (binary file not shown)
build/win32/vcruntime140.dll (binary file not shown)
build/yarn.lock (2172 lines; file diff suppressed because it is too large)
dataprotocol-node/.gitignore (vendored, 29 lines deleted)
@@ -1,29 +0,0 @@
# Logs
logs
*.log

# Runtime data
pids
*.pid
*.seed

# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov

# Coverage directory used by tools like istanbul
coverage

# node-waf configuration
.lock-wscript

# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
lib/
out/

# Dependency directory
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git-
node_modules

# Debug log from npm
npm-debug.log
@@ -1,18 +0,0 @@

Copyright (c) Microsoft Corporation

All rights reserved.

Source EULA

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
@@ -1,4 +0,0 @@
# Microsoft Data Management Protocol - Node

## License
[MIT](https://github.com/Microsoft/carbon/blob/dev/license.txt)
@@ -1,24 +0,0 @@
{
    "rules": {
        "indent": [
            2,
            "tab"
        ],
        "quotes": [
            2,
            "single"
        ],
        "linebreak-style": [
            2,
            "windows"
        ],
        "semi": [
            2,
            "always"
        ]
    },
    "env": {
        "node": true
    },
    "extends": "eslint:recommended"
}
@@ -1,9 +0,0 @@
.vscode/
lib/test/
lib/*.map
src/
test/
.eslintrc
.gitignore
gulpfile.js
tsd.json
dataprotocol-node/client/.vscode/launch.json (vendored, 32 lines deleted)
@@ -1,32 +0,0 @@
{
    "version": "0.1.0",
    // List of configurations. Add new configurations or edit existing ones.
    // ONLY "node" and "mono" are supported, change "type" to switch.
    "configurations": [
        {
            "request": "launch",
            // Name of configuration; appears in the launch configuration drop down menu.
            "name": "Mocha",
            // Type of configuration. Possible values: "node", "mono".
            "type": "node",
            // Workspace relative or absolute path to the program.
            "program": "node_modules/mocha/bin/_mocha",
            // Automatically stop program after launch.
            "stopOnEntry": false,
            // Command line arguments passed to the program.
            "args": ["--timeout", "999999"],
            // Workspace relative or absolute path to the working directory of the program being debugged. Default is the current workspace.
            "cwd": ".",
            // Workspace relative or absolute path to the runtime executable to be used. Default is the runtime executable on the PATH.
            "runtimeExecutable": null,
            // Optional arguments passed to the runtime executable.
            "runtimeArgs": [],
            // Environment variables passed to the program.
            "env": { },
            // Use JavaScript source maps (if they exist).
            "sourceMaps": true,
            // If JavaScript source maps are enabled, the generated code is expected in this directory.
            "outDir": "lib"
        }
    ]
}
dataprotocol-node/client/.vscode/settings.json (vendored, 11 lines deleted)
@@ -1,11 +0,0 @@
// Place your settings in this file to overwrite default and user settings.
{
    "javascript.validate.enable": false,
    "files.trimTrailingWhitespace": true,
    "eslint.enable": false,
    "editor.insertSpaces": false,
    "editor.tabSize": 4,
    "tslint.enable": false,
    "typescript.tsdk": "./node_modules/typescript/lib",
    "typescript.tsserver.trace": "off"
}
dataprotocol-node/client/.vscode/tasks.json (vendored, 9 lines deleted)
@@ -1,9 +0,0 @@
{
    "version": "0.1.0",
    "command": "npm",
    "isShellCommand": true,
    "args": ["run", "watch"],
    "showOutput": "silent",
    "isWatching": true,
    "problemMatcher": "$tsc-watch"
}
@@ -1,4 +0,0 @@
# Microsoft Data Management Protocol - Node

## License
[MIT](https://github.com/Microsoft/carbon/blob/dev/license.txt)
Some files were not shown because too many files have changed in this diff.