Mirror of https://github.com/ckaczor/azuredatastudio.git (synced 2026-02-17 18:46:43 -05:00)

Compare commits (342 commits)
Commit SHAs, newest first:

f9b968c1ae, e3f6feb135, 7b0be2e773, 9fec7edba7, 27468f75a5, 79ad848216, 1a2c6f1578,
17e4d3bfa3, 2599fb1252, 26d59b528e, 093f44a1e7, 290f43dbd7, fb2486a54b, 7e02c16fd7,
6638db1f35, f0dde491be, 994a2382ad, 856fec4243, d1c594cfd0, 86da9852ca, 4ba6a979ba,
82974a2135, 585c18ef4d, 7ec516d851, 7e970d04ca, 808ce4366d, 8271226487, bbb7a67bd2,
698b4fce41, 302e8305ef, 659f392196, 775a25d944, bceb766d28, 36fd618ed4, 99c473cdf6,
88b55d0e06, 184d4bbe27, 4fc6f4a13e, adad11c725, a1b5af0445, db4f512991, 42ff30515c,
62565e0577, de177c0335, d614116b63, a7ff238653, 6fb120f5dd, 6e8cc3aaca, 18ab73cc1d,
d1c7370f1c, 794f7a14c0, 7cd2a6d6aa, febf6b9e70, 7201025a15, 4787d7ba5c, 3de95af25c,
0bf4790a64, 0d9353d99e, a898c46e74, 493e7087cf, f5ce7fb2a5, a8818ab0df, 9691fab917,
a7f5741608, c34869c243, ab94b9785e, ee94524ab1, 6cce532ca4, 7f16a4d857, 255ea1945b,
b38b53b658, 82c60a23c0, c93ea20b75, 6a4e4fc07b, d358cdac1e, 4f8ced1f6b, 8cc60fde90,
b8bc629970, 9a83dfc022, d5e26527a6, c9226a07c5, d451528b36, 48b2cbb0bf, 39e6b9933d,
d08fb1aee2, 5235a1d029, 3135b8525b, 0e9797c394, 4145ecfb32, 20e9b329b1, d1ccbf028f,
a1fc621e1b, 9ef6bec960, b631530753, d9997cebfc, ffee69a765, 7a38943412, c970173fc0,
0979ce8de6, 878bcc0d92, 68b2f1a8e4, 8cd06f74b9, 43387f0d0b, f3a6fc6f88, f2bc367e78,
46a8410fc5, be7c26ede5, fb3b7be9e5, 78731e0c8c, 6b67f27cac, 56182a53d1, 66e1c01793,
f0039a64a7, 22501a09a1, c03cce7f60, 98abf4a758, c19bc54877, e17d4e96ae, b333788c3c,
632ca0685e, 52de2b4751, fc0c05c755, 22b8ebd281, ec91d3eda0, 7b31ee27d8, 927120fa3b,
f1ca2a35ef, 1760af13d1, 183cb84fbc, 019d5088ec, 15913e5e48, e6ffb97a7b, 8655044dfb,
f26c790736, 7e553031ce, 164ec41fb1, 8cd9097526, 134b0b32c6, de4b7af1ad, f976fc9418,
55059907a3, 8ca0082ec4, 5b50696a1b, 840683e3f0, ee5dbdffb9, 43f6a5576d, 8a44de27e7,
fa9bbd4e1e, 66048f1d63, bafd9fd437, dae71c3bf4, ae8304fc33, d6ef42c8b0, 131b0b93bf,
82185f75d7, 3769b5066f, ba8c331356, 80248846bb, cf5297958a, 22996cbce7, 7563416754,
02b1673c71, 6ef87d7067, e3921c6d14, f9ef9d85f4, 7a2c30e159, 6438967202, 63bf82ad84,
18ab2ae799, 86d6295bf0, 3f306d2396, 2f1f5b2376, 62d7c71093, 4f69ed5745, ddddf3beb4,
0ae525cbd5, 0520870754, 8b17b77010, e2ef1f8a89, 7f7052ad42, 738ca479e4, 34a274a7d1,
b1496aa12f, 30acba7921, b5c0c37a23, ef0a92d83f, b364e32beb, 7f51921176, efebd681b6,
d635390b33, 61f0d614ce, 27b80804f5, df0c505452, 564f78b6f6, df6b6ded33, e801a04bcf,
3b1eaca58e, 22a427f934, 4645a8ba6b, 1b88c10197, 5a392dfd58, e49ff93122, 399788ccc1,
08fde8719d, f7bef3f87b, c70e7794eb, 9eb438bb24, 38decaea90, ade68b184d, 3c702c15e2,
76e8805a6b, 5dc7049f8c, 97f852c3d6, a8eed6114b, 6864d39f85, 6f47c1fcda, c2c64293f5,
5bbc17be5c, abbb1e54da, 08d81927b4, 3d718068d1, 1d31a6ef98, 9c8f36e463, bd988f62a2,
29a46b9f8b, 55acb36e33, a8b442a274, 330f690628, 7cc430d199, 2558d6bff6, 5aff7ef4c7,
7569f7fa32, 333f634e94, b62c0cf2ab, 572a83dac7, dbf834c00f, 24a6897836, 23da6155dd,
ef9321ef2c, 836bf1d28a, 9eb319f392, 79b6a14d64, 0dec2ff9b5, dd270a78fc, 827e6162c7,
9f54fbc8cc, f8858a3511, 82e5221024, 004297aea6, f7b8a019cd, a89788a020, 62af81e88c,
ab736466cd, 6a6f30523c, 8e3fa0a26d, 72c088e137, ce5eb00177, 5629356c66, 6e7311ca87,
d315ccff68, 789ee4b133, 428745e929, cea8d62051, fa79e5b016, 15fd37e049, 1dd4ea19a3,
067af76904, a7f597c943, 833adf3515, bd15a96b83, 024bd00d93, eb1aafa639, bf6b68c614,
a895f53029, 78d3b9d555, 42e1b28130, 5590d60c5a, d79423c728, d1a0ae43c8, e4b0371b2a,
e191556d3b, 5a269fc49a, 613cd58aa3, af9984f73b, 3b1c9e910d, 5b29aef5f3, b65a7795df,
c6a78456b8, f8067ffada, f7059a2365, d013b594b1, c5d427ebb1, 240b90610f, 5cfad825fc,
8918d1593c, b1e0b7c1e3, 3a5e4cbeac, 7bfa6e611e, 004c177f7b, 86cc3f77ee, a33820ecdd,
7babd6f3d0, e28ecf44cb, 93685d3a09, d660405e73, 684fb2566b, 696f6841cb, cef60f3ae5,
34fe2b44cc, a16bfbfedd, fb4fccf2d5, b53cad78bd, a431ca7ef2, c2022cac57, 806d807eae,
24e3b1c5e6, bbe3605317, 06d67f5ad2, 41f9f22e38, a94cbb528e, 4a68ab4659, 4c24043cc8,
2ca5d18855, 37426b0794, a70ebeed1c, 8f2113e6b5, 03cb0565d4, dd14f9b93d, 4dd15fb479,
397f6afaf1, 65fb77ef5c, 1e22f47304, 7c9be74970, 5efb2cf918, 98505110a4, 1a864584b6,
8aa8dc29a1, 06e86e57e7, 6a375fdd8c, b8ad7e3072, 597a0cad6b, a646af2ad2
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 2 changes)
@@ -2,7 +2,7 @@
 name: Bug report
 about: Create a report to help us improve
 title: ''
-labels: Bug
+labels: ''
 assignees: ''

 ---

.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 1 line)
blank_issues_enabled: false

.github/ISSUE_TEMPLATE/feature_request.md (vendored, 2 changes)
@@ -2,7 +2,7 @@
 name: Feature request
 about: Suggest an idea for this project
 title: ''
-labels: Enhancement
+labels: ''
 assignees: ''

 ---

.github/classifier.yml (vendored, 8 changes)
@@ -2,12 +2,14 @@
 perform: true,
 alwaysRequireAssignee: false,
 labelsRequiringAssignee: [],
+defaultLabel: 'Triage: Needed',
+defaultAssignee: '',
 autoAssignees: {
   Area - Acquisition: [],
   Area - Azure: [],
   Area - Backup\Restore: [],
   Area - Charting\Insights: [],
-  Area - Connection: [],
+  Area - Connection: [ charles-gagnon ],
   Area - DacFX: [],
   Area - Dashboard: [],
   Area - Data Explorer: [],
@@ -15,9 +17,9 @@
   Area - Extensibility: [],
   Area - External Table: [],
   Area - Fundamentals: [],
-  Area - Language Service: [],
+  Area - Language Service: [ charles-gagnon ],
   Area - Localization: [],
-  Area - Notebooks: [],
+  Area - Notebooks: [ chlafreniere ],
   Area - Performance: [],
   Area - Query Editor: [ anthonydresser ],
   Area - Query Plan: [],

.github/pull_request_template.md (vendored, new file, 9 lines)
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->

This PR fixes #

.github/stale.yml (vendored, new file, 6 lines)
{
  perform: true,
  label: 'Stale PR',
  daysSinceLastUpdate: 7,
  ignoredLabels: ['Do Not Merge']
}

.github/workflows/ci.yml (vendored, new file, 118 lines)
name: CI

on:
  push:
    branches:
      - master
      - release/*
  pull_request:
    branches:
      - master
      - release/*

jobs:
  linux:
    runs-on: ubuntu-latest
    env:
      CHILD_CONCURRENCY: "1"
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@v1
      # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
      - run: |
          sudo apt-get update
          sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
          sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
          sudo chmod +x /etc/init.d/xvfb
          sudo update-rc.d xvfb defaults
          sudo service xvfb start
        name: Setup Build Environment
      - uses: actions/setup-node@v1
        with:
          node-version: 10
      # TODO: cache node modules
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron x64
        name: Download Electron
      - run: yarn gulp hygiene --skip-tslint
        name: Run Hygiene Checks
      - run: yarn gulp tslint
        name: Run TSLint Checks
      - run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
        name: Run Strict Null Check
      # - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
      #   name: Run Monaco Editor Checks
      - run: yarn compile
        name: Compile Sources
      # - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
      #   name: Download Built-in Extensions
      - run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
        name: Run Unit Tests
      # - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
      #   name: Run Integration Tests

  windows:
    runs-on: windows-2016
    env:
      CHILD_CONCURRENCY: "1"
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@v1
      - uses: actions/setup-node@v1
        with:
          node-version: 10
      - uses: actions/setup-python@v1
        with:
          python-version: '2.x'
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron
        name: Download Electron
      - run: yarn gulp hygiene --skip-tslint
        name: Run Hygiene Checks
      - run: yarn gulp tslint
        name: Run TSLint Checks
      - run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
        name: Run Strict Null Check
      # - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
      #   name: Run Monaco Editor Checks
      - run: yarn compile
        name: Compile Sources
      # - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
      #   name: Download Built-in Extensions
      - run: .\scripts\test.bat --tfs "Unit Tests"
        name: Run Unit Tests
      # - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
      #   name: Run Integration Tests

  darwin:
    runs-on: macos-latest
    env:
      CHILD_CONCURRENCY: "1"
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@v1
      - uses: actions/setup-node@v1
        with:
          node-version: 10
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron x64
        name: Download Electron
      - run: yarn gulp hygiene --skip-tslint
        name: Run Hygiene Checks
      - run: yarn gulp tslint
        name: Run TSLint Checks
      - run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
        name: Run Strict Null Check
      # - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
      #   name: Run Monaco Editor Checks
      - run: yarn compile
        name: Compile Sources
      # - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
      #   name: Download Built-in Extensions
      - run: ./scripts/test.sh --tfs "Unit Tests"
        name: Run Unit Tests
      # - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
      #   name: Run Integration Tests

.github/workflows/tslint.yml (vendored, new file, 13 lines)
name: TSLint Enforcement
on: [pull_request]
jobs:
  job:
    runs-on: ubuntu-latest
    timeout-minutes: 5
    steps:
      - uses: actions/checkout@v1
      - name: TSLint
        uses: aaomidi/gh-action-tslint@master
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          tslint_config: 'tslint-sql.json'

.vscode/launch.json (vendored, 46 changes)
@@ -67,17 +67,16 @@
 "request": "launch",
 "name": "Launch azuredatastudio",
 "windows": {
-	"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
-	"timeout": 45000
+	"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
 },
 "osx": {
-	"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
-	"timeout": 45000
+	"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
 },
 "linux": {
-	"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
-	"timeout": 45000
+	"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
 },
 "port": 9222,
+"timeout": 20000,
 "env": {
 	"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
 },
@@ -128,6 +127,33 @@
 "webRoot": "${workspaceFolder}",
 "timeout": 45000
 },
+{
+	"type": "chrome",
+	"request": "launch",
+	"name": "Launch ADS (Web) (TBD)",
+	"runtimeExecutable": "yarn",
+	"runtimeArgs": [
+		"web"
+	],
+},
+{
+	"type": "chrome",
+	"request": "launch",
+	"name": "Launch ADS (Web, Chrome) (TBD)",
+	"url": "http://localhost:8080",
+	"preLaunchTask": "Run web"
+},
+{
+	"type": "node",
+	"request": "launch",
+	"name": "Git Unit Tests",
+	"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
+	"stopOnEntry": false,
+	"cwd": "${workspaceFolder}/extensions/git",
+	"outFiles": [
+		"${workspaceFolder}/extensions/git/out/**/*.js"
+	]
+},
 {
 	"name": "Launch Built-in Extension",
 	"type": "extensionHost",
@@ -233,6 +259,14 @@
 	"Attach to Main Process"
 ]
 },
+{
+	"name": "Debug azuredatastudio Main, Renderer & Extension Host",
+	"configurations": [
+		"Launch azuredatastudio",
+		"Attach to Main Process",
+		"Attach to Extension Host"
+	]
+},
 {
 	"name": "Debug Renderer and search processes",
 	"configurations": [

.vscode/settings.json (vendored, 2 changes)
@@ -39,6 +39,7 @@
 ],
 "typescript.tsdk": "node_modules/typescript/lib",
 "npm.exclude": "**/extensions/**",
+"npm.packageManager": "yarn",
 "emmet.excludeLanguages": [],
 "typescript.preferences.importModuleSpecifier": "non-relative",
 "typescript.preferences.quoteStyle": "single",
@@ -60,5 +61,6 @@
 "remote.extensionKind": {
 	"msjsdiag.debugger-for-chrome": "workspace"
 },
 "gulp.autoDetect": "off",
+"files.insertFinalNewline": true
 }

.vscode/tasks.json (vendored, 29 changes)
@@ -33,15 +33,15 @@
 },
 {
 	"type": "npm",
-	"script": "strict-initialization-watch",
-	"label": "TS - Strict Initialization",
+	"script": "strict-function-types-watch",
+	"label": "TS - Strict Function Types",
 	"isBackground": true,
 	"presentation": {
 		"reveal": "never"
 	},
 	"problemMatcher": {
 		"base": "$tsc-watch",
-		"owner": "typescript-strict-initialization",
+		"owner": "typescript-function-types",
 		"applyTo": "allDocuments"
 	}
 },
@@ -90,8 +90,8 @@
 	"problemMatcher": []
 },
 {
-	"type": "gulp",
-	"task": "electron",
+	"type": "npm",
+	"script": "electron",
 	"label": "Download electron"
 },
 {
@@ -99,5 +99,24 @@
 	"task": "hygiene",
 	"problemMatcher": []
 },
+{
+	"type": "shell",
+	"command": "yarn web -- --no-launch",
+	"label": "Run web",
+	"isBackground": true,
+	// This section to make error go away when launching the debug config
+	"problemMatcher": {
+		"pattern": {
+			"regexp": ""
+		},
+		"background": {
+			"beginsPattern": ".*node .*",
+			"endsPattern": "Web UI available at .*"
+		}
+	},
+	"presentation": {
+		"reveal": "never"
+	}
+},
 ]
 }

.yarnrc (2 changes)
@@ -1,3 +1,3 @@
 disturl "https://atom.io/download/electron"
-target "4.2.10"
+target "6.1.5"
 runtime "electron"

CHANGELOG.md (15 changes)
@@ -1,5 +1,20 @@
 # Change Log

+## Version 1.13.1
+* Release date: November 15, 2019
+* Release status: General Availability
+* Resolved [#8210 Copy/Paste results are out of order](https://github.com/microsoft/azuredatastudio/issues/8210).
+
+## Version 1.13.0
+* Release date: November 4, 2019
+* Release status: General Availability
+* General Availability release for Schema Compare and DACPAC extensions
+* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/43?closed=1).
+
+## Contributions and "thank you"
+We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
+* aspnerd for `Use selected DB for import wizard schema list` [#7878](https://github.com/microsoft/azuredatastudio/pull/7878)
+
 ## Version 1.12.2
 * Release date: October 11, 2019
 * Release status: General Availability

README.md (18 changes)
@@ -2,6 +2,7 @@
 [](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
 [](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
+[](https://twitter.com/azuredatastudio)

 Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.

@@ -9,13 +10,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
 Platform | Link
 -- | --
-Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2105135
-Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2105134
-Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2104938
-macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2105133
-Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2105132
-Linux RPM | https://go.microsoft.com/fwlink/?linkid=2104937
-Linux DEB | https://go.microsoft.com/fwlink/?linkid=2105131
+Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2109256
+Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2109085
+Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2109255
+macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2109180
+Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2109179
+Linux RPM | https://go.microsoft.com/fwlink/?linkid=2109178
+Linux DEB | https://go.microsoft.com/fwlink/?linkid=2109254

 Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.

@@ -68,6 +69,9 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
 ## Contributions and "Thank You"
 We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

+* eulercamposbarros for `Prevent connections from moving on click (#7528)`
+* AlexFsmn for `Fixed issue where task icons got hidden if text was too long`
+* jamesrod817 for `Tempdb (#7022)`
 * dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
 * devmattrick for `Update row count as updates are received #6642`
 * mottykohn for `In Message panel onclick scroll to line #6417`

(deleted file, 85 lines)
@@ -1,85 +0,0 @@
steps:
- script: |
    export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
    sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
    sudo chmod +x /etc/init.d/xvfb
    sudo update-rc.d xvfb defaults
    sudo service xvfb start
    sudo apt-get install -y libkrb5-dev
    # sh -e /etc/init.d/xvfb start
    # sleep 3
  displayName: "Linux preinstall"
  condition: eq(variables['Agent.OS'], 'Linux')

- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.10.1"

- script: |
    yarn --frozen-lockfile
  displayName: Install Dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    yarn gulp electron-x64
  displayName: Download Electron
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- script: |
    yarn gulp hygiene --skip-tslint
  displayName: Run Hygiene Checks

- script: |
    yarn tslint
  displayName: Run TSLint

- script: |
    yarn strict-null-check
  displayName: Run Strict Null Check

- script: |
    yarn compile
  displayName: Compile

- script: |
    DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
  displayName: Run Unit Tests (Linux)
  condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))

- script: |
    DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests" --coverage
  displayName: Run Unit Tests (Mac)
  condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))

- task: PublishTestResults@2
  inputs:
    testResultsFiles: '*-results.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
  condition: succeededOrFailed()

- task: PublishCodeCoverageResults@1
  inputs:
    codeCoverageTool: 'cobertura'
    summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
    reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-report
  condition: ne(variables['Agent.OS'], 'Linux')

(deleted file, 60 lines)
@@ -1,60 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.10.1"

- script: |
    yarn --frozen-lockfile
  displayName: Install Dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    yarn gulp electron-x64
  displayName: "Electron"
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- script: |
    yarn gulp hygiene --skip-tslint
  displayName: Run Hygiene Checks

- script: |
    yarn tslint
  displayName: Run TSLint

- script: |
    yarn strict-null-check
  displayName: Run Strict Null Check

- script: |
    yarn compile
  displayName: Compile

- powershell: |
    .\scripts\test.bat --tfs "Unit Tests"
  displayName: Run Unit Tests

- task: PublishTestResults@2
  inputs:
    testResultsFiles: "*-results.xml"
    searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
  condition: succeededOrFailed()

@@ -3,20 +3,20 @@ trigger:
 - release/*

 jobs:
-- job: Windows
-  pool:
-    vmImage: VS2017-Win2016
-  steps:
-  - template: azure-pipelines-windows.yml
+- job: Windows
+  pool:
+    vmImage: VS2017-Win2016
+  steps:
+  - template: build/azure-pipelines/win32/continuous-build-win32.yml

-- job: Linux
-  pool:
-    vmImage: "Ubuntu-16.04"
-  steps:
-  - template: azure-pipelines-linux-mac.yml
+- job: Linux
+  pool:
+    vmImage: 'Ubuntu-16.04'
+  steps:
+  - template: build/azure-pipelines/linux/continuous-build-linux.yml

-- job: macOS
-  pool:
-    vmImage: macOS 10.13
-  steps:
-  - template: azure-pipelines-linux-mac.yml
+- job: macOS
+  pool:
+    vmImage: macOS 10.13
+  steps:
+  - template: build/azure-pipelines/darwin/continuous-build-darwin.yml

@@ -1 +1 @@
-2019-08-30T20:24:23.714Z
+2019-12-01T02:20:58.491Z

build/azure-pipelines/common/copyArtifacts.ts (new file, 36 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as vfs from 'vinyl-fs';

const files = [
	'.build/extensions/**/*.vsix', // external extensions
	'.build/win32-x64/**/*.{exe,zip}', // windows binaries
	'.build/linux/sha256hashes.txt', // linux hashes
	'.build/linux/deb/amd64/deb/*', // linux debs
	'.build/linux/rpm/x86_64/*', // linux rpms
	'.build/linux/server/*', // linux server
	'.build/linux/archive/*', // linux archive
	'.build/docker/**', // docker images
	'.build/darwin/**', // darwin binaries
	'.build/version.json' // version information
];

async function main() {
	return new Promise((resolve, reject) => {
		const stream = vfs.src(files, { base: '.build', allowEmpty: true })
			.pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));

		stream.on('end', () => resolve());
		stream.on('error', e => reject(e));
	});
}

main().catch(err => {
	console.error(err);
	process.exit(1);
});

build/azure-pipelines/common/createAsset.ts (new file, 132 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';

interface Asset {
	platform: string;
	type: string;
	url: string;
	mooncakeUrl?: string;
	hash: string;
	sha256hash: string;
	size: number;
	supportsFastUpdate?: boolean;
}

if (process.argv.length !== 6) {
	console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
	process.exit(-1);
}

function hashStream(hashName: string, stream: Readable): Promise<string> {
	return new Promise<string>((c, e) => {
		const shasum = crypto.createHash(hashName);

		stream
			.on('data', shasum.update.bind(shasum))
			.on('error', e)
			.on('close', () => c(shasum.digest('hex')));
	});
}

async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
	const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
	return existsResult.exists;
}

async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
	const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
		contentSettings: {
			contentType: mime.lookup(file),
			cacheControl: 'max-age=31536000, public'
		}
	};

	await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}

function getEnv(name: string): string {
	const result = process.env[name];

	if (typeof result === 'undefined') {
		throw new Error('Missing env: ' + name);
	}

	return result;
}

async function main(): Promise<void> {
	const [, , platform, type, name, file] = process.argv;
	const quality = getEnv('VSCODE_QUALITY');
	const commit = getEnv('BUILD_SOURCEVERSION');

	console.log('Creating asset...');

	const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
	const size = stat.size;

	console.log('Size:', size);

	const stream = fs.createReadStream(file);
	const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);

	console.log('SHA1:', sha1hash);
	console.log('SHA256:', sha256hash);

	const blobName = commit + '/' + name;
	const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;

	const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
		.withFilter(new azure.ExponentialRetryPolicyFilter(20));

	const blobExists = await doesAssetExist(blobService, quality, blobName);

	if (blobExists) {
		console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
		return;
	}

	console.log('Uploading blobs to Azure storage...');

	await uploadBlob(blobService, quality, blobName, file);

	console.log('Blobs successfully uploaded.');

	const asset: Asset = {
		platform,
		type,
		url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
		hash: sha1hash,
		sha256hash,
		size
	};

	// Remove this if we ever need to rollback fast updates for windows
	if (/win32/.test(platform)) {
		asset.supportsFastUpdate = true;
	}

	console.log('Asset:', JSON.stringify(asset, null, ' '));

	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const scripts = client.database('builds').container(quality).scripts;
	await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
}

main().then(() => {
	console.log('Asset successfully created');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});

build/azure-pipelines/common/createBuild.ts (new file, 60 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { CosmosClient } from '@azure/cosmos';

if (process.argv.length !== 3) {
	console.error('Usage: node createBuild.js VERSION');
	process.exit(-1);
}

function getEnv(name: string): string {
	const result = process.env[name];

	if (typeof result === 'undefined') {
		throw new Error('Missing env: ' + name);
	}

	return result;
}

async function main(): Promise<void> {
	const [, , _version] = process.argv;
	const quality = getEnv('VSCODE_QUALITY');
	const commit = getEnv('BUILD_SOURCEVERSION');
	const queuedBy = getEnv('BUILD_QUEUEDBY');
	const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
	const version = _version + (quality === 'stable' ? '' : `-${quality}`);

	console.log('Creating build...');
	console.log('Quality:', quality);
	console.log('Version:', version);
	console.log('Commit:', commit);

	const build = {
		id: commit,
		timestamp: (new Date()).getTime(),
		version,
		isReleased: false,
		sourceBranch,
		queuedBy,
		assets: [],
		updates: {}
	};

	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const scripts = client.database('builds').container(quality).scripts;
	await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
}

main().then(() => {
	console.log('Build successfully created');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});

build/azure-pipelines/common/releaseBuild.ts (new file, 70 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { CosmosClient } from '@azure/cosmos';

function getEnv(name: string): string {
	const result = process.env[name];

	if (typeof result === 'undefined') {
		throw new Error('Missing env: ' + name);
	}

	return result;
}

interface Config {
	id: string;
	frozen: boolean;
}

function createDefaultConfig(quality: string): Config {
	return {
		id: quality,
		frozen: false
	};
}

async function getConfig(client: CosmosClient, quality: string): Promise<Config> {
	const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;

	const res = await client.database('builds').container('config').items.query(query).fetchAll();

	if (res.resources.length === 0) {
		return createDefaultConfig(quality);
	}

	return res.resources[0] as Config;
}

async function main(): Promise<void> {
	const commit = getEnv('BUILD_SOURCEVERSION');
	const quality = getEnv('VSCODE_QUALITY');

	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
	const config = await getConfig(client, quality);

	console.log('Quality config:', config);

	if (config.frozen) {
		console.log(`Skipping release because quality ${quality} is frozen.`);
		return;
	}

	console.log(`Releasing build ${commit}...`);

	const scripts = client.database('builds').container(quality).scripts;
	await scripts.storedProcedure('releaseBuild').execute('', [commit]);
}

main().then(() => {
	console.log('Build successfully released');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});

@@ -8,7 +8,7 @@
 import * as url from 'url';
 import * as azure from 'azure-storage';
 import * as mime from 'mime';
-import { DocumentClient, RetrievedDocument } from 'documentdb';
+import { CosmosClient } from '@azure/cosmos';

 function log(...args: any[]) {
 	console.log(...[`[${new Date().toISOString()}]`, ...args]);
@@ -23,7 +23,7 @@
 	process.exit(-1);
 }

-interface Build extends RetrievedDocument {
+interface Build {
 	assets: Asset[];
 }

@@ -38,62 +38,20 @@ interface Asset {
 	supportsFastUpdate?: boolean;
 }

-function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
-	const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
-	const collection = 'dbs/builds/colls/' + quality;
-	const updateQuery = {
-		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
-		parameters: [{ name: '@id', value: commit }]
-	};
-
-	let updateTries = 0;
-
-	function _update(): Promise<void> {
-		updateTries++;
-
-		return new Promise<void>((c, e) => {
-			client.queryDocuments(collection, updateQuery).toArray((err, results) => {
-				if (err) { return e(err); }
-				if (results.length !== 1) { return e(new Error('No documents')); }
-
-				const release = results[0];
-
-				release.assets = [
-					...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
-					asset
-				];
-
-				client.replaceDocument(release._self, release, err => {
-					if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
-					if (err) { return e(err); }
-
-					log('Build successfully updated.');
-					c();
-				});
-			});
-		});
-	}
-
-	return _update();
-}
-
 async function sync(commit: string, quality: string): Promise<void> {
 	log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);

-	const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
-	const collection = `dbs/builds/colls/${quality}`;
-	const query = {
-		query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
-		parameters: [{ name: '@id', value: commit }]
-	};
-
-	const build = await new Promise<Build>((c, e) => {
-		cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
-			if (err) { return e(err); }
-			if (results.length !== 1) { return e(new Error('No documents')); }
-			c(results[0] as Build);
-		});
-	});
+	const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
+	const container = client.database('builds').container(quality);
+
+	const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
+	const res = await container.items.query<Build>(query, {}).fetchAll();
+
+	if (res.resources.length !== 1) {
+		throw new Error(`No builds found for ${commit}`);
+	}
+
+	const build = res.resources[0];

 	log(`Found build for ${commit}, with ${build.assets.length} assets`);

@@ -140,8 +98,9 @@ async function sync(commit: string, quality: string): Promise<void> {
 	await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));

 	log(`  Updating build in DB...`);
-	asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
-	await updateBuild(commit, quality, asset.platform, asset.type, asset);
+	const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
+	await container.scripts.storedProcedure('setAssetMooncakeUrl')
+		.execute('', [commit, asset.platform, asset.type, mooncakeUrl]);

 	log(`  Done ✔️`);
 } catch (err) {

@@ -1,27 +1,27 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
-- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
-  inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
-    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
-    vstsFeed: '$(ArtifactFeed)'
-- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
+    versionSpec: "12.13.0"
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
   inputs:
     versionSpec: "1.x"
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
 - script: |
-    yarn --frozen-lockfile
+    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
   displayName: Install Dependencies
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
-    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
-    vstsFeed: '$(ArtifactFeed)'
+    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - script: |
-    yarn gulp electron-x64
+    yarn electron x64
   displayName: Download Electron
 - script: |
     yarn gulp hygiene --skip-tslint
@@ -29,21 +29,27 @@ steps:
 - script: |
     yarn gulp tslint
   displayName: Run TSLint Checks
-- script: |
-    yarn monaco-compile-check
-  displayName: Run Monaco Editor Checks
+- script: | # {{SQL CARBON EDIT}} add step
+    yarn strict-null-check
+  displayName: Run Strict Null Check.
+- script: | # {{SQL CARBON EDIT}} add step
+    yarn tslint
+  displayName: Run TSLint (gci)
+# - script: | {{SQL CARBON EDIT}} remove step
+#     yarn monaco-compile-check
+#   displayName: Run Monaco Editor Checks
 - script: |
     yarn compile
   displayName: Compile Sources
-- script: |
-    yarn download-builtin-extensions
-  displayName: Download Built-in Extensions
+# - script: | {{SQL CARBON EDIT}} remove step
+#     yarn download-builtin-extensions
+#   displayName: Download Built-in Extensions
 - script: |
     ./scripts/test.sh --tfs "Unit Tests"
   displayName: Run Unit Tests
-- script: |
-    ./scripts/test-integration.sh --tfs "Integration Tests"
-  displayName: Run Integration Tests
+# - script: | {{SQL CARBON EDIT}} remove step
+#     ./scripts/test-integration.sh --tfs "Integration Tests"
+#   displayName: Run Integration Tests
 - task: PublishTestResults@2
   displayName: Publish Tests Results
   inputs:

build/azure-pipelines/darwin/createDrop.sh (new executable file, 14 lines)
#!/usr/bin/env bash
set -e
REPO="$(pwd)"

# ensure drop directories exist
mkdir -p $REPO/.build/darwin/{archive,server}

# remove pkg from archive
zip -d $REPO/.build/darwin/archive/azuredatastudio-darwin.zip "*.pkg"

# package Remote Extension Host
pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $REPO/.build/darwin/server/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd

node build/azure-pipelines/common/copyArtifacts.js

@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:

@@ -5,28 +5,20 @@ set -e
 zip -d ../VSCode-darwin.zip "*.pkg"

 # publish the build
-PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
-VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
-node build/azure-pipelines/common/publish.js \
-	"$VSCODE_QUALITY" \
+node build/azure-pipelines/common/createAsset.js \
 	darwin \
 	archive \
 	"VSCode-darwin-$VSCODE_QUALITY.zip" \
-	$VERSION \
-	true \
 	../VSCode-darwin.zip

 # package Remote Extension Host
 pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd

 # publish Remote Extension Host
-node build/azure-pipelines/common/publish.js \
-	"$VSCODE_QUALITY" \
+node build/azure-pipelines/common/createAsset.js \
 	server-darwin \
 	archive-unsigned \
 	"vscode-server-darwin.zip" \
-	$VERSION \
-	true \
 	../vscode-server-darwin.zip

 # publish hockeyapp symbols

build/azure-pipelines/darwin/sql-product-build-darwin.yml (new file, 169 lines)
steps:
- script: |
    mkdir -p .build
    echo -n $BUILD_SOURCEVERSION > .build/commit
    echo -n $VSCODE_QUALITY > .build/quality
  displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
    vstsFeed: 'BuildCache'
    platformIndependent: true
    alias: 'Compilation'

- script: |
    set -e
    exit 1
  displayName: Check RestoreCache
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
  inputs:
    versionSpec: '10.15.3'

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: '1.x'

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: ado-secrets
    SecretsFilter: 'github-distro-mixin-password'

- script: |
    set -e

    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $(github-distro-mixin-password)
    EOF

    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling

- script: |
    set -e
    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git fetch distro
    git merge $(node -p "require('./package.json').distro")
  displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'

- script: |
    set -e
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
    set -e
    node build/azure-pipelines/mixin
  displayName: Mix in quality

- script: |
    set -e
    yarn gulp install-sqltoolsservice
  displayName: Install sqltoolsservice

- script: |
    set -e
    yarn gulp package-rebuild-extensions
    yarn gulp vscode-darwin-min-ci
    yarn gulp vscode-reh-darwin-min-ci
    yarn gulp vscode-reh-web-darwin-min-ci
  displayName: Build
  env:
    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- script: |
    set -e
    ./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
  displayName: Run unit tests
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
    set -e
    mkdir -p .build/darwin/archive
    pushd ../azuredatastudio-darwin && zip -r -X -y $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip * && popd
  displayName: 'Archive'

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  displayName: 'ESRP CodeSigning'
  inputs:
    ConnectedServiceName: 'Code Signing'
    FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
    Pattern: 'azuredatastudio-darwin.zip'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-401337-Apple",
          "operationSetCode": "MacAppDeveloperSign",
          "parameters": [],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 20

- script: |
    set -e
    ./build/azure-pipelines/darwin/createDrop.sh
  displayName: Create Drop

- task: PublishBuildArtifacts@1
  displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
  displayName: 'Publish Test Results test-results.xml'
  inputs:
    testResultsFiles: 'test-results.xml'
    searchFolder: '$(Build.SourcesDirectory)'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishTestResults@2
  displayName: 'Publish Integration and Smoke Test Results'
  inputs:
    testResultsFiles: 'dawin-integration-tests-results.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
  inputs:
    codeCoverageTool: Cobertura
    summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
    reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  inputs:
    failOnAlert: true

19
build/azure-pipelines/darwin/sql-publish.ps1
Normal file
19
build/azure-pipelines/darwin/sql-publish.ps1
Normal file
@@ -0,0 +1,19 @@
Param(
    [string]$sourcesDir,
    [string]$artifactsDir,
    [string]$storageKey,
    [string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit

$ZipName = "azuredatastudio-darwin.zip"
$Zip = "$artifactsDir\darwin\archive\$ZipName"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
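The argument order passed to common/publish.js, as far as this diff shows, is: quality, platform, asset type, file name, version, a boolean flag, file path, commit id. A sketch of the same call outside the pipeline, with placeholder variables (all hypothetical):

    # Placeholder values; argument order inferred from the calls in this diff.
    node build/azure-pipelines/common/publish.js \
      "$QUALITY" darwin archive azuredatastudio-darwin.zip \
      "$VERSION" true "$ZIP_PATH" "$COMMIT_ID"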
@@ -1,3 +1,6 @@
+pool:
+  vmImage: 'Ubuntu-16.04'
+
 trigger:
   branches:
     include: ['master', 'release/*']
@@ -8,27 +11,27 @@ pr:
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
   inputs:
-    azureSubscription: 'vscode-builds-subscription'
-    KeyVaultName: vscode
+    azureSubscription: 'azuredatastudio-adointegration'
+    KeyVaultName: ado-secrets

 - script: |
     set -e

     cat << EOF > ~/.netrc
     machine github.com
-    login vscode
+    login azuredatastudio
     password $(github-distro-mixin-password)
     EOF

-    git config user.email "vscode@microsoft.com"
-    git config user.name "VSCode"
+    git config user.email "andresse@microsoft.com"
+    git config user.name "AzureDataStudio"

-    git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
+    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
     git fetch distro

     # Push master branch into oss/master
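A side note on the ~/.netrc step: git's HTTPS transport reads credentials from that file, which is what lets the unattended `git fetch distro` authenticate. The step writes the file with default permissions; a slightly more defensive variant (the chmod and the token variable are additions here, not something the pipeline does) would be:

    # Hypothetical hardened variant of the pipeline's netrc step.
    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $GITHUB_TOKEN
    EOF
    chmod 600 ~/.netrc   # restricting permissions is conventional for credential files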
build/azure-pipelines/docker/Dockerfile (new file, 16 lines)
@@ -0,0 +1,16 @@
# Download base image ubuntu 16.04
FROM ubuntu:16.04

# Update software repository
RUN apt-get update

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0

ADD ./ /opt/ads-server

RUN chmod +x /opt/ads-server/server.sh && chmod +x /opt/ads-server/node

CMD ["/opt/ads-server/server.sh"]

EXPOSE 8000:8000
EXPOSE 8001:8001
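A caveat on the last two lines (standard Docker semantics, not specific to this repo): EXPOSE only documents ports for the image and takes a port with an optional protocol; the host:container mapping form belongs to `docker run -p`, and nothing is published by EXPOSE alone:

    # Ports are only bound at run time; EXPOSE by itself publishes nothing.
    docker run -d -p 8000:8000 -p 8001:8001 azuredatastudio-server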
@@ -7,7 +7,7 @@ pr: none
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
build/azure-pipelines/linux/Dockerfile (new file, 20 lines)
@@ -0,0 +1,20 @@
# Download base image ubuntu 16.04
FROM ubuntu:16.04

# Update software repository
RUN apt-get update --fix-missing

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
    libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
    libnss3 libasound2 make gcc libx11-dev fakeroot rpm

# docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN apt-key fingerprint 0EBFCD88
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
RUN apt-get update
RUN apt-get -y install docker-ce docker-ce-cli containerd.io

ADD ./xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults
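Why xvfb appears in this image (general X11 background): the Electron-based tests need an X display, and Xvfb provides one in memory on a headless agent. The init script installed above serves display :10, which the DISPLAY=:10 test invocations later in this diff point at. A minimal manual equivalent:

    # Hypothetical manual stand-in for the xvfb init script.
    Xvfb :10 -screen 0 1024x768x24 &
    DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"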
@@ -2,34 +2,34 @@ steps:
 - script: |
     set -e
     sudo apt-get update
-    sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
+    sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
     sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
     sudo chmod +x /etc/init.d/xvfb
     sudo update-rc.d xvfb defaults
     sudo service xvfb start
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
-- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
-  inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
-    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
-    vstsFeed: '$(ArtifactFeed)'
-- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
+    versionSpec: "12.13.0"
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
   inputs:
     versionSpec: "1.x"
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
 - script: |
-    yarn --frozen-lockfile
+    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
   displayName: Install Dependencies
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
-    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
-    vstsFeed: '$(ArtifactFeed)'
+    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - script: |
-    yarn gulp electron-x64
+    yarn electron x64
   displayName: Download Electron
 - script: |
     yarn gulp hygiene --skip-tslint
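The cache choreography above is worth restating: RestoreCache sets CacheRestored to 'true' on a hit, and the install step and SaveCache both run only when it is anything else, so a warm cache skips yarn entirely. As plain shell (the variable mapping is illustrative; the real gate is the condition expression):

    # Hypothetical restatement of the pipeline's cache gate.
    if [ "${CACHERESTORED:-false}" != "true" ]; then
      CHILD_CONCURRENCY=1 yarn --frozen-lockfile   # serialized installs, commonly used to avoid flaky native rebuilds
    fi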
@@ -37,21 +37,27 @@ steps:
 - script: |
     yarn gulp tslint
   displayName: Run TSLint Checks
-- script: |
-    yarn monaco-compile-check
-  displayName: Run Monaco Editor Checks
+- script: | # {{SQL CARBON EDIT}} add gci checks
+    yarn tslint
+  displayName: Run TSLint (gci)
+- script: | # {{SQL CARBON EDIT}} add strict null check
+    yarn strict-null-check
+  displayName: Run Strict Null Check
+# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
+#     yarn monaco-compile-check
+#   displayName: Run Monaco Editor Checks
 - script: |
     yarn compile
   displayName: Compile Sources
-- script: |
-    yarn download-builtin-extensions
-  displayName: Download Built-in Extensions
+# - script: | {{SQL CARBON EDIT}} remove step
+#     yarn download-builtin-extensions
+#   displayName: Download Built-in Extensions
 - script: |
     DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
   displayName: Run Unit Tests
-- script: |
-    DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
-  displayName: Run Integration Tests
+# - script: | {{SQL CARBON EDIT}} remove step
+#     DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
+#   displayName: Run Integration Tests
 - task: PublishTestResults@2
   displayName: Publish Tests Results
   inputs:
build/azure-pipelines/linux/createDrop.sh (new executable file, 37 lines)
@@ -0,0 +1,37 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
mkdir -p $REPO/.build/linux/{archive,server}
PLATFORM_LINUX="linux-x64"
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"

# create version
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
COMMIT_ID=$(git rev-parse HEAD)
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"

rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"

rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

# create docker
mkdir -p $REPO/.build/docker
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz

node build/azure-pipelines/common/copyArtifacts.js
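Two hand-offs happen in this script (both visible elsewhere in this diff): the version.json it writes is read back by darwin/sql-publish.ps1 and linux/sql-publish.ps1 for version, quality, and commit, and the `docker save` tarball can be rehydrated on any Docker host. A sketch of the consuming side:

    # Hypothetical consumer of the drop artifacts.
    node -p 'JSON.parse(require("fs").readFileSync(".build/version.json", "utf8")).version'
    gunzip -c .build/docker/azuredatastudio-server-docker.tar.gz | docker load
    docker run -d -p 8000:8000 -p 8001:8001 azuredatastudio-server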
@@ -1,40 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the Source EULA. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-'use strict';
-Object.defineProperty(exports, "__esModule", { value: true });
-const documentdb_1 = require("documentdb");
-function createDefaultConfig(quality) {
-    return {
-        id: quality,
-        frozen: false
-    };
-}
-function getConfig(quality) {
-    const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
-    const collection = 'dbs/builds/colls/config';
-    const query = {
-        query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
-        parameters: [
-            { name: '@quality', value: quality }
-        ]
-    };
-    return new Promise((c, e) => {
-        client.queryDocuments(collection, query).toArray((err, results) => {
-            if (err && err.code !== 409) {
-                return e(err);
-            }
-            c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
-        });
-    });
-}
-getConfig(process.argv[2])
-    .then(config => {
-        console.log(config.frozen);
-        process.exit(0);
-    })
-    .catch(err => {
-        console.error(err);
-        process.exit(1);
-    });
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -118,6 +118,32 @@ steps:
   displayName: Run integration tests
   condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

+- script: |
+    set -e
+    yarn gulp "vscode-linux-x64-build-deb"
+    yarn gulp "vscode-linux-x64-build-rpm"
+    yarn gulp "vscode-linux-x64-prepare-snap"
+  displayName: Build packages
+
+- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
+  inputs:
+    ConnectedServiceName: 'ESRP CodeSign'
+    FolderPath: '.build/linux/rpm/x86_64'
+    Pattern: '*.rpm'
+    signConfigType: inlineSignParams
+    inlineOperation: |
+      [
+        {
+          "keyCode": "CP-450779-Pgp",
+          "operationSetCode": "LinuxSign",
+          "parameters": [ ],
+          "toolName": "sign",
+          "toolVersion": "1.0"
+        }
+      ]
+    SessionTimeout: 120
+  displayName: Codesign rpm
+
 - script: |
     set -e
     AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
@@ -10,13 +10,11 @@ BUILD="$ROOT/$BUILDNAME"
 BUILD_VERSION="$(date +%s)"
 [ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
 TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
-PACKAGEJSON="$BUILD/resources/app/package.json"
-VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

 rm -rf $ROOT/code-*.tar.*
 (cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
+node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"

 # Publish Remote Extension Host
 LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
@@ -27,32 +25,28 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
 rm -rf $ROOT/vscode-server-*.tar.*
 (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
+node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"

 # Publish hockeyapp symbols
 node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"

 # Publish DEB
-yarn gulp "vscode-linux-x64-build-deb"
 PLATFORM_DEB="linux-deb-x64"
 DEB_ARCH="amd64"
 DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
 DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
+node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"

 # Publish RPM
-yarn gulp "vscode-linux-x64-build-rpm"
 PLATFORM_RPM="linux-rpm-x64"
 RPM_ARCH="x86_64"
 RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
 RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
+node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"

 # Publish Snap
-yarn gulp "vscode-linux-x64-prepare-snap"

 # Pack snap tarball artifact, in order to preserve file perms
 mkdir -p $REPO/.build/linux/snap-tarball
 SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
@@ -1,7 +1,7 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -43,12 +43,10 @@ steps:
     # Create snap package
     BUILD_VERSION="$(date +%s)"
    SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
-    PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
-    VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
     SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
     (cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")

     # Publish snap package
     AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
     AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
-    node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
+    node build/azure-pipelines/common/createAsset.js "linux-snap-x64" package "$SNAP_FILENAME" "$SNAP_PATH"
build/azure-pipelines/linux/sql-product-build-linux.yml (new file, 170 lines)
@@ -0,0 +1,170 @@
steps:
- script: |
    mkdir -p .build
    echo -n $BUILD_SOURCEVERSION > .build/commit
    echo -n $VSCODE_QUALITY > .build/quality
  displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
    vstsFeed: 'BuildCache'
    platformIndependent: true
    alias: 'Compilation'

- script: |
    set -e
    exit 1
  displayName: Check RestoreCache
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
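The Check RestoreCache step reads strangely in isolation, since its body is just `exit 1`. The logic lives in the condition: the step only executes when the Compile job's compilation cache was not restored, so a cache miss fails this platform job immediately instead of letting it build against missing outputs. As plain shell:

    # Hypothetical restatement; the real gate is the step's condition expression.
    if [ "$CACHERESTORED_COMPILATION" != "true" ]; then
      echo "Compilation cache missing, aborting platform build" >&2
      exit 1
    fi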
- task: NodeTool@0
  inputs:
    versionSpec: '10.15.1'

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.x"

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: ado-secrets
    SecretsFilter: 'github-distro-mixin-password'

- script: |
    set -e
    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $(github-distro-mixin-password)
    EOF

    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling

- script: |
    set -e
    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git fetch distro
    git merge $(node -p "require('./package.json').distro")
  displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'

- script: |
    set -e
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
    set -e
    node build/azure-pipelines/mixin
  displayName: Mix in quality

- script: |
    set -e
    yarn gulp install-sqltoolsservice
    yarn gulp install-ssmsmin
  displayName: Install extension binaries

- script: |
    set -e
    yarn gulp vscode-linux-x64-min-ci
    yarn gulp vscode-reh-linux-x64-min-ci
    yarn gulp vscode-reh-web-linux-x64-min-ci
  displayName: Build
  env:
    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- script: |
    set -e
    service xvfb start
  displayName: Start xvfb
  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
    set -e
    yarn gulp package-rebuild-extensions
    yarn gulp compile-extensions
    yarn gulp package-external-extensions
  displayName: Package External extensions

- script: |
    set -e
    APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
    APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
    DISPLAY=:10 ./scripts/test-extensions-unit.sh
  displayName: 'Run Stable Extension Unit Tests'
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
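A small pattern used by both extension-test steps: the launcher binary's name is not hardcoded, it is read out of the built product.json at run time, so the step keeps working if the application name changes. Standalone, the lookup is just:

    # Hypothetical standalone check; APP_ROOT points at the built product.
    APP_ROOT="$AGENT_BUILDDIRECTORY/azuredatastudio-linux-x64"
    node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName"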
- script: |
    set -e
    APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
    APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
    DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
  displayName: 'Run Unstable Extension Unit Tests'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

- script: |
    set -e
    yarn gulp vscode-linux-x64-build-deb
  displayName: Build Deb

- script: |
    set -e
    yarn gulp vscode-linux-x64-build-rpm
  displayName: Build Rpm

- script: |
    set -e
    ./build/azure-pipelines/linux/createDrop.sh
  displayName: Create Drop

- task: PublishBuildArtifacts@1
  displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
  displayName: 'Publish Test Results test-results.xml'
  inputs:
    testResultsFiles: 'test-results.xml'
    searchFolder: '$(Build.SourcesDirectory)'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
  inputs:
    codeCoverageTool: Cobertura
    summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
    reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
  continueOnError: true

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  inputs:
    failOnAlert: true
build/azure-pipelines/linux/sql-publish.ps1 (new file, 36 lines)
@@ -0,0 +1,36 @@
Param(
    [string]$sourcesDir,
    [string]$artifactsDir,
    [string]$storageKey,
    [string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit
$Arch = "x64"

# Publish tarball
$PlatformLinux = "linux-$Arch"
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId

# Publish DEB
$PlatformDeb = "linux-deb-$Arch"
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId

# Publish RPM
$PlatformRpm = "linux-rpm-$Arch"
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId
@@ -21,7 +21,7 @@ function main() {
 		return;
 	}

-	const productJsonFilter = filter('product.json', { restore: true });
+	const productJsonFilter = filter('**/product.json', { restore: true });

 	fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
 	return vfs
@@ -29,7 +29,7 @@ function main() {
 		.pipe(filter(f => !f.isDirectory()))
 		.pipe(productJsonFilter)
 		.pipe(buffer())
-		.pipe(json(o => Object.assign({}, require('../product.json'), o)))
+		.pipe(json(o => Object.assign({}, require('../../product.json'), o)))
 		.pipe(productJsonFilter.restore)
 		.pipe(es.mapSync(function (f) {
 			fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
@@ -38,4 +38,4 @@ function main() {
 		.pipe(vfs.dest('.'));
 }

-main();
+main();
@@ -67,7 +67,7 @@ jobs:
   - template: linux/product-build-linux-multiarch.yml

 - job: LinuxArm64
-  condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
+  condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
   pool:
     vmImage: 'Ubuntu-16.04'
   variables:
@@ -118,6 +118,7 @@ jobs:
   - Linux
   - LinuxSnap
   - LinuxArmhf
+  - LinuxArm64
   - LinuxAlpine
   - macOS
   steps:
@@ -133,6 +134,7 @@ jobs:
   - Linux
   - LinuxSnap
   - LinuxArmhf
+  - LinuxArm64
   - LinuxAlpine
   - LinuxWeb
   - macOS
@@ -12,23 +12,24 @@ steps:
     vstsFeed: 'npm-vscode'
     platformIndependent: true
     alias: 'Compilation'
+    dryRun: true

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+    versionSpec: "12.13.0"
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
     versionSpec: "1.x"
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
   inputs:
     azureSubscription: 'vscode-builds-subscription'
     KeyVaultName: vscode
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -41,7 +42,7 @@ steps:
     git config user.email "vscode@microsoft.com"
     git config user.name "VSCode"
   displayName: Prepare tooling
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -49,33 +50,33 @@ steps:
     git fetch distro
     git merge $(node -p "require('./package.json').distro")
   displayName: Merge distro
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
   inputs:
     keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
     vstsFeed: 'npm-vscode'
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
     CHILD_CONCURRENCY=1 yarn --frozen-lockfile
   displayName: Install dependencies
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
     keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
     vstsFeed: 'npm-vscode'
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

 - script: |
     set -e
     yarn postinstall
   displayName: Run postinstall scripts
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))

 # Mixin must run before optimize, because the CSS loader will
 # inline small SVGs
@@ -83,7 +84,7 @@ steps:
     set -e
     node build/azure-pipelines/mixin
   displayName: Mix in quality
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -91,20 +92,20 @@ steps:
     yarn gulp tslint
     yarn monaco-compile-check
   displayName: Run hygiene, tslint and monaco compile checks
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

 - script: |
     set -e
     ./build/azure-pipelines/common/extract-telemetry.sh
   displayName: Extract Telemetry
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
     AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
     ./build/azure-pipelines/common/publish-webview.sh
   displayName: Publish Webview
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -114,14 +115,22 @@ steps:
     yarn gulp minify-vscode-reh
     yarn gulp minify-vscode-reh-web
   displayName: Compile
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
     AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
     node build/azure-pipelines/upload-sourcemaps
   displayName: Upload sourcemaps
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
+
+- script: |
+    set -e
+    VERSION=`node -p "require(\"./package.json\").version"`
+    AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
+    node build/azure-pipelines/common/createBuild.js $VERSION
+  displayName: Create build
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
@@ -130,4 +139,4 @@ steps:
     vstsFeed: 'npm-vscode'
     platformIndependent: true
     alias: 'Compilation'
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
@@ -1,36 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the Source EULA. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-'use strict';
-Object.defineProperty(exports, "__esModule", { value: true });
-const cp = require("child_process");
-let tag = '';
-try {
-    tag = cp
-        .execSync('git describe --tags `git rev-list --tags --max-count=1`')
-        .toString()
-        .trim();
-    if (!isValidTag(tag)) {
-        throw Error(`Invalid tag ${tag}`);
-    }
-}
-catch (err) {
-    console.error(err);
-    console.error('Failed to update types');
-    process.exit(1);
-}
-function isValidTag(t) {
-    if (t.split('.').length !== 3) {
-        return false;
-    }
-    const [major, minor, bug] = t.split('.');
-    // Only release for tags like 1.34.0
-    if (bug !== '0') {
-        return false;
-    }
-    if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
-        return false;
-    }
-    return true;
-}
@@ -35,9 +35,9 @@ function isValidTag(t: string) {
 		return false;
 	}

-	if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
+	if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
 		return false;
 	}

 	return true;
 }
 }
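The one-line fix above deserves its one-line explanation: NaN is the only JavaScript value that is not equal to itself, so `parseInt(...) === NaN` is always false and the old guard could never reject a malformed tag; isNaN() is the correct test. A quick demonstration from a shell:

    # NaN never compares equal to anything, including itself.
    node -e 'const n = parseInt("abc", 10); console.log(n === NaN, isNaN(n))'   # prints: false true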
@@ -9,7 +9,7 @@ pr: none
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -1,62 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the Source EULA. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-'use strict';
-Object.defineProperty(exports, "__esModule", { value: true });
-const fs = require("fs");
-const cp = require("child_process");
-const path = require("path");
-let tag = '';
-try {
-    tag = cp
-        .execSync('git describe --tags `git rev-list --tags --max-count=1`')
-        .toString()
-        .trim();
-    const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
-    const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
-    cp.execSync(`curl ${dtsUri} --output ${outPath}`);
-    updateDTSFile(outPath, tag);
-    console.log(`Done updating vscode.d.ts at ${outPath}`);
-}
-catch (err) {
-    console.error(err);
-    console.error('Failed to update types');
-    process.exit(1);
-}
-function updateDTSFile(outPath, tag) {
-    const oldContent = fs.readFileSync(outPath, 'utf-8');
-    const newContent = getNewFileContent(oldContent, tag);
-    fs.writeFileSync(outPath, newContent);
-}
-function getNewFileContent(content, tag) {
-    const oldheader = [
-        `/*---------------------------------------------------------------------------------------------`,
-        ` * Copyright (c) Microsoft Corporation. All rights reserved.`,
-        ` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
-        ` *--------------------------------------------------------------------------------------------*/`
-    ].join('\n');
-    return getNewFileHeader(tag) + content.slice(oldheader.length);
-}
-function getNewFileHeader(tag) {
-    const [major, minor] = tag.split('.');
-    const shorttag = `${major}.${minor}`;
-    const header = [
-        `// Type definitions for Visual Studio Code ${shorttag}`,
-        `// Project: https://github.com/microsoft/vscode`,
-        `// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
-        `// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
-        ``,
-        `/*---------------------------------------------------------------------------------------------`,
-        ` * Copyright (c) Microsoft Corporation. All rights reserved.`,
-        ` * Licensed under the Source EULA.`,
-        ` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
-        ` *--------------------------------------------------------------------------------------------*/`,
-        ``,
-        `/**`,
-        ` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
-        ` * See https://code.visualstudio.com/api for more information`,
-        ` */`
-    ].join('\n');
-    return header;
-}
@@ -19,4 +19,4 @@ steps:
     (cd build ; yarn)

     AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
-    node build/azure-pipelines/common/release.js
+    node build/azure-pipelines/common/releaseBuild.js
build/azure-pipelines/sql-product-build.yml (new file, 73 lines)
@@ -0,0 +1,73 @@
resources:
  containers:
  - container: linux-x64
    image: sqltoolscontainers.azurecr.io/linux-build-agent:x64
    endpoint: ContainerRegistry

jobs:
- job: Compile
  pool:
    vmImage: 'Ubuntu-16.04'
  container: linux-x64
  steps:
  - template: sql-product-compile.yml

- job: macOS
  condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
  pool:
    vmImage: macOS 10.13
  dependsOn:
  - Compile
  steps:
  - template: darwin/sql-product-build-darwin.yml

- job: Linux
  condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
  pool:
    vmImage: 'Ubuntu-16.04'
  container: linux-x64
  dependsOn:
  - Compile
  steps:
  - template: linux/sql-product-build-linux.yml

- job: Windows
  condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
  pool:
    vmImage: VS2017-Win2016
  dependsOn:
  - Compile
  steps:
  - template: win32/sql-product-build-win32.yml

- job: Windows_Test
  condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
  pool:
    name: mssqltools
  dependsOn:
  - Linux
  - Windows
  steps:
  - template: win32/sql-product-test-win32.yml

- job: Release
  condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
  pool:
    vmImage: 'Ubuntu-16.04'
  dependsOn:
  - macOS
  - Linux
  - Windows
  - Windows_Test
  steps:
  - template: sql-release.yml

trigger: none
pr: none

schedules:
- cron: "0 5 * * Mon-Fri"
  displayName: Mon-Fri at 7:00
  branches:
    include:
    - master
build/azure-pipelines/sql-product-compile.yml (new file, 112 lines)
@@ -0,0 +1,112 @@
steps:
- script: |
    mkdir -p .build
    echo -n $BUILD_SOURCEVERSION > .build/commit
    echo -n $VSCODE_QUALITY > .build/quality
  displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
    vstsFeed: 'BuildCache'
    platformIndependent: true
    alias: 'Compilation'

- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.x"
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: ado-secrets
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
    set -e
    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $(github-distro-mixin-password)
    EOF

    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
    set -e
    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git fetch distro
    git merge $(node -p "require('./package.json').distro")
  displayName: Merge distro
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
    set -e
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))

# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
    set -e
    node build/azure-pipelines/mixin
  displayName: Mix in quality
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
    set -e
    yarn gulp hygiene --skip-tslint
    yarn gulp tslint
  displayName: Run hygiene, tslint
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
    set -e
    yarn gulp compile-build
    yarn gulp compile-extensions-build
    yarn gulp minify-vscode
    yarn gulp minify-vscode-reh
    yarn gulp minify-vscode-reh-web
  displayName: Compile
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
    vstsFeed: 'BuildCache'
    platformIndependent: true
    alias: 'Compilation'
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
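The compile job's caching scheme in one sentence: the expensive compile outputs (.build and the out-* folders) are cached under a key derived from build/.cachesalt plus the commit and quality written in the first step, so re-running the same commit and quality restores the outputs instead of recompiling. Conceptually the key is a digest of those inputs (the task computes its own hash; this is only an illustration):

    # Illustrative only: the RestoreCache task hashes its keyfile list internally.
    cat build/.cachesalt .build/commit .build/quality | sha256sum | cut -d' ' -f1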
build/azure-pipelines/sql-release.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
steps:
- script: |
    set -e
    echo "##vso[build.addbuildtag]Release"
  displayName: Set For Release
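The release template is this small because tagging is all it does: `##vso[...]` strings are Azure DevOps logging commands, ordinary lines on stdout that the agent intercepts, so any script step can tag the build it runs in:

    # Logging commands are plain stdout; the agent parses lines of this shape.
    echo "##vso[build.addbuildtag]Release"
    echo "##vso[task.setvariable variable=IS_RELEASE]true"   # another common logging command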
@@ -1,7 +1,7 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -7,12 +7,9 @@ ROOT="$REPO/.."
 WEB_BUILD_NAME="vscode-web"
 WEB_TARBALL_FILENAME="vscode-web.tar.gz"
 WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
-BUILD="$ROOT/$WEB_BUILD_NAME"
-PACKAGEJSON="$BUILD/package.json"
-VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

 rm -rf $ROOT/vscode-web.tar.*

 (cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "web-standalone" archive-unsigned "$WEB_TARBALL_FILENAME" "$VERSION" true "$WEB_TARBALL_PATH"
+node build/azure-pipelines/common/createAsset.js web-standalone archive-unsigned "$WEB_TARBALL_FILENAME" "$WEB_TARBALL_PATH"
@@ -1,8 +1,8 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
-- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
+    versionSpec: "12.13.0"
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
   inputs:
     versionSpec: "1.x"
 - task: UsePythonVersion@0
@@ -11,42 +11,50 @@ steps:
     addToPath: true
 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
   inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
-    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
-    vstsFeed: '$(ArtifactFeed)'
+    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
 - powershell: |
     yarn --frozen-lockfile
   env:
     CHILD_CONCURRENCY: "1"
   displayName: Install Dependencies
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
-    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
-    targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
-    vstsFeed: '$(ArtifactFeed)'
+    keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - powershell: |
-    yarn gulp electron
+    yarn electron
 - script: |
     yarn gulp hygiene --skip-tslint
   displayName: Run Hygiene Checks
 - script: |
     yarn gulp tslint
   displayName: Run TSLint Checks
-- powershell: |
-    yarn monaco-compile-check
-  displayName: Run Monaco Editor Checks
+- script: | # {{SQL CARBON EDIT}} add step
+    yarn tslint
+  displayName: Run TSLint (gci)
+- script: | # {{SQL CARBON EDIT}} add step
+    yarn strict-null-check
+  displayName: Run Strict Null Check
+# - powershell: | {{SQL CARBON EDIT}} remove step
+#     yarn monaco-compile-check
+#   displayName: Run Monaco Editor Checks
 - powershell: |
     yarn compile
   displayName: Compile Sources
-- powershell: |
-    yarn download-builtin-extensions
-  displayName: Download Built-in Extensions
+# - powershell: | {{SQL CARBON EDIT}} remove step
+#     yarn download-builtin-extensions
+#   displayName: Download Built-in Extensions
 - powershell: |
     .\scripts\test.bat --tfs "Unit Tests"
   displayName: Run Unit Tests
-- powershell: |
-    .\scripts\test-integration.bat --tfs "Integration Tests"
-  displayName: Run Integration Tests
+# - powershell: | {{SQL CARBON EDIT}} remove step
+#     .\scripts\test-integration.bat --tfs "Integration Tests"
+#   displayName: Run Integration Tests
 - task: PublishTestResults@2
   displayName: Publish Tests Results
   inputs:
build/azure-pipelines/win32/createDrop.ps1 (new file, 20 lines)
@@ -0,0 +1,20 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"

$Arch = "x64"

$Repo = "$(pwd)"
$Root = "$Repo\.."
$LegacyServer = "$Root\azuredatastudio-reh-win32-$Arch"
$ServerName = "azuredatastudio-server-win32-$Arch"
$Server = "$Root\$ServerName"
$ServerZipLocation = "$Repo\.build\win32-$Arch\server"
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"

# Create server archive
New-Item $ServerZipLocation -ItemType Directory # not wrapped in exec: New-Item can report failure here even on success
$global:LASTEXITCODE = 0
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"

exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -107,7 +107,7 @@ steps:
 - powershell: |
     . build/azure-pipelines/win32/exec.ps1
     $ErrorActionPreference = "Stop"
-    exec { yarn gulp "electron-$(VSCODE_ARCH)" }
+    exec { yarn electron $(VSCODE_ARCH) }
     exec { .\scripts\test.bat --build --tfs "Unit Tests" }
   displayName: Run unit tests
   condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -23,14 +23,13 @@ exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
 # get version
 $PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
 $Version = $PackageJson.version
-$Quality = "$env:VSCODE_QUALITY"

 $AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }

-exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
-exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
-exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
-exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
+exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Zip }
+exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $SystemExe }
+exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
+exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }

 # publish hockeyapp symbols
 $hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
build/azure-pipelines/win32/sql-product-build-win32.yml (new file, 278 lines)
@@ -0,0 +1,278 @@
|
||||
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'

- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"

- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin azuredatastudio`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII

exec { git config user.email "andresse@microsoft.com" }
exec { git config user.name "AzureDataStudio" }
displayName: Prepare tooling

- powershell: |
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "install-sqltoolsservice" }
displayName: Install sqltoolsservice

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "package-rebuild-extensions" }
exec { yarn gulp "vscode-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { .\scripts\test-unstable.bat --build --coverage --reporter mocha-junit-reporter }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable tests

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'Sign out code'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(agent.builddirectory)/azuredatastudio-win32-x64'
Pattern: '*.exe,*.node,resources/app/node_modules.asar.unpacked/*.dll,swiftshader/*.dll,d3dcompiler_47.dll,libGLESv2.dll,ffmpeg.dll,libEGL.dll,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,Microsoft.SqlServer.*.dll,Microsoft.Data.Tools.Sql.BatchParser.dll'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Azure Data Studio"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://github.com/microsoft/azuredatastudio"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd sha256"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
}
]
SessionTimeout: 600
MaxConcurrency: 5
MaxRetryAttempts: 20
condition: and(succeeded(), eq(variables['signed'], true))

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-x64-user-setup" }
exec { yarn gulp "vscode-win32-x64-system-setup" }
exec { yarn gulp "vscode-win32-x64-archive" }
displayName: Archive & User & System setup

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'Sign installers'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '.build'
Pattern: '*.exe'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Azure Data Studio"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://github.com/microsoft/azuredatastudio"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd sha256"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
}
]
SessionTimeout: 600
MaxConcurrency: 5
MaxRetryAttempts: 20
condition: and(succeeded(), eq(variables['signed'], true))

- task: ArchiveFiles@2
displayName: 'Archive build scripts source'
inputs:
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
archiveType: tar
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'

- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: build scripts source'
inputs:
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
ArtifactName: source

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\createDrop.ps1
displayName: Create Drop

- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishTestResults@2
displayName: 'Publish Integration and Smoke Test Results'
inputs:
testResultsFiles: '*.xml'
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
mergeTestResults: true
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true
106 build/azure-pipelines/win32/sql-product-test-win32.yml Normal file
@@ -0,0 +1,106 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { git clean -fxd }
displayName: Clean repo

- task: DownloadPipelineArtifact@2
inputs:
buildType: 'current'
targetPath: '$(Build.SourcesDirectory)\.build'
artifactName: drop

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Build.SourcesDirectory)\.build\win32-x64/archive/azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
displayName: Unzip artifact

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: SqlToolsSecretStore'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: SqlToolsSecretStore
SecretsFilter: 'ads-integration-test-azure-server,ads-integration-test-azure-server-password,ads-integration-test-azure-server-username,ads-integration-test-bdc-server,ads-integration-test-bdc-server-password,ads-integration-test-bdc-server-username,ads-integration-test-standalone-server,ads-integration-test-standalone-server-password,ads-integration-test-standalone-server-username'

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
displayName: Run stable tests
env:
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
STANDALONE_SQL: $(ads-integration-test-standalone-server)
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
AZURE_SQL: $(ads-integration-test-azure-server)

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
displayName: Run release tests
env:
ADS_TEST_GREP: (.*@REL@|integration test setup)
ADS_TEST_INVERT_GREP: 0
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
STANDALONE_SQL: $(ads-integration-test-standalone-server)
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
AZURE_SQL: $(ads-integration-test-azure-server)

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; .\scripts\sql-test-integration-unstable.bat }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable integration tests
env:
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
STANDALONE_SQL: $(ads-integration-test-standalone-server)
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
AZURE_SQL: $(ads-integration-test-azure-server)
29 build/azure-pipelines/win32/sql-publish.ps1 Normal file
@@ -0,0 +1,29 @@
Param(
[string]$sourcesDir,
[string]$artifactsDir,
[string]$storageKey,
[string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$ExeName = "AzureDataStudioSetup.exe"
$SystemExe = "$artifactsDir\win32-x64\system-setup\$ExeName"
$UserExe = "$artifactsDir\win32-x64\user-setup\$ExeName"
$UserExeName = "AzureDataStudioUserSetup.exe"
$ZipName = "azuredatastudio-win32-x64.zip"
$Zip = "$artifactsDir\win32-x64\archive\$ZipName"

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit

$assetPlatform = "win32-x64"

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-archive" archive $ZipName $Version true $Zip $CommitId

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform" setup $ExeName $Version true $SystemExe $CommitId

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-user" setup $UserExeName $Version true $UserExe $CommitId
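A minimal usage sketch for the script above, assuming it is called from a pipeline step after the build artifacts exist; the parameter names come from the Param block, but the paths and secret variable names here are illustrative assumptions, not taken from this repository:

# Hypothetical invocation: $(storage-key) and $(docdb-key) are placeholder
# pipeline secrets, and the artifacts directory is assumed to be .build.
.\build\azure-pipelines\win32\sql-publish.ps1 `
	-sourcesDir "$(Build.SourcesDirectory)" `
	-artifactsDir "$(Build.SourcesDirectory)\.build" `
	-storageKey "$(storage-key)" `
	-documentDbKey "$(docdb-key)"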
@@ -1,7 +1,7 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.2",
"version": "0.3.3",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]

@@ -1,7 +1,7 @@
[
{
"name": "Microsoft.sqlservernotebook",
"version": "0.3.2",
"version": "0.3.3",
"repo": "https://github.com/Microsoft/azuredatastudio"
}
]
@@ -57,7 +57,6 @@ var BUNDLED_FILE_HEADER = [
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);

const extractEditorSrcTask = task.define('extract-editor-src', () => {
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
const apiusages = monacoapi.execute().usageContent;
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
standalone.extractEditor({
@@ -71,25 +70,15 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
apiusages,
extrausages
],
typings: [
'typings/lib.ie11_safe_es6.d.ts',
'typings/thenable.d.ts',
'typings/es6-promise.d.ts',
'typings/require-monaco.d.ts',
"typings/lib.es2018.promise.d.ts",
'vs/monaco.d.ts'
],
libs: [
`lib.es5.d.ts`,
`lib.dom.d.ts`,
`lib.webworker.importscripts.d.ts`
],
redirects: {
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
},
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
destRoot: path.join(root, 'out-editor-src')
destRoot: path.join(root, 'out-editor-src'),
redirects: []
});
});
@@ -140,18 +129,70 @@ const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () =>
});

const compileEditorESMTask = task.define('compile-editor-esm', () => {
console.log(`Launching the TS compiler at ${path.join(__dirname, '../out-editor-esm')}...`);
let result;
if (process.platform === 'win32') {
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
console.log(result.stderr.toString());
} else {
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
cwd: path.join(__dirname, '../out-editor-esm')
});
console.log(result.stdout.toString());
console.log(result.stderr.toString());
}

console.log(result.stdout.toString());
console.log(result.stderr.toString());

if (result.status !== 0) {
console.log(`The TS Compilation failed, preparing analysis folder...`);
const destPath = path.join(__dirname, '../../vscode-monaco-editor-esm-analysis');
return util.rimraf(destPath)().then(() => {
fs.mkdirSync(destPath);

// initialize a new repository
cp.spawnSync(`git`, [`init`], {
cwd: destPath
});

// build a list of files to copy
const files = util.rreddir(path.join(__dirname, '../out-editor-esm'));

// copy files from src
for (const file of files) {
const srcFilePath = path.join(__dirname, '../src', file);
const dstFilePath = path.join(destPath, file);
if (fs.existsSync(srcFilePath)) {
util.ensureDir(path.dirname(dstFilePath));
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
fs.writeFileSync(dstFilePath, contents);
}
}

// create an initial commit to diff against
cp.spawnSync(`git`, [`add`, `.`], {
cwd: destPath
});

// create the commit
cp.spawnSync(`git`, [`commit`, `-m`, `"original sources"`, `--no-gpg-sign`], {
cwd: destPath
});

// copy files from esm
for (const file of files) {
const srcFilePath = path.join(__dirname, '../out-editor-esm', file);
const dstFilePath = path.join(destPath, file);
if (fs.existsSync(srcFilePath)) {
util.ensureDir(path.dirname(dstFilePath));
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
fs.writeFileSync(dstFilePath, contents);
}
}

console.log(`Open in VS Code the folder at '${destPath}' and you can analyze the compilation error`);
throw new Error('Standalone Editor compilation failed. If this is the build machine, simply launch `yarn run gulp editor-distro` on your machine to further analyze the compilation problem.');
});
}
});
@@ -115,7 +115,8 @@ const tasks = compilations.map(function (tsconfigFile) {

const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}}
const input = pipeline.tsProjectSrc();
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
const input = es.merge(nonts, pipeline.tsProjectSrc());

return input
.pipe(pipeline())
@@ -124,7 +125,8 @@ const tasks = compilations.map(function (tsconfigFile) {

const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(false);
const input = pipeline.tsProjectSrc();
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
const input = es.merge(nonts, pipeline.tsProjectSrc());
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });

return watchInput
@@ -134,7 +136,8 @@ const tasks = compilations.map(function (tsconfigFile) {

const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
const pipeline = createPipeline(true, true);
const input = pipeline.tsProjectSrc();
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
const input = es.merge(nonts, pipeline.tsProjectSrc());

return input
.pipe(pipeline())
@@ -73,7 +73,6 @@ const indentationFilter = [
'!**/yarn-error.log',

// except multiple specific folders
'!**/octicons/**',
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
@@ -126,6 +125,7 @@ const copyrightFilter = [
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!**/promise-polyfill/polyfill.js',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
@@ -136,29 +136,30 @@ const copyrightFilter = [
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
'!scripts/code-web.js',
// {{SQL CARBON EDIT}}
'!extensions/notebook/src/intellisense/text.ts',
'!extensions/mssql/src/hdfs/webhdfs.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/parts/notebook/common/models/url.ts',
'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts',
'!src/sql/workbench/parts/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/parts/notebook/browser/models/mimemodel.ts',
'!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css',
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
'!src/sql/workbench/contrib/notebook/browser/models/renderMimeInterfaces.ts',
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
'!src/sql/workbench/contrib/notebook/browser/models/mimemodel.ts',
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/parts/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/parts/notebook/common/models/nbformat.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/sanitizer.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/registry.ts',
'!src/sql/workbench/contrib/notebook/browser/outputs/factories.ts',
'!src/sql/workbench/contrib/notebook/common/models/nbformat.ts',
'!extensions/markdown-language-features/media/tomorrow.css',
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
'!src/sql/workbench/parts/notebook/electron-browser/cellViews/media/highlight.css',
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
'!extensions/mssql/sqltoolsservice/**',
'!extensions/import/flatfileimportservice/**',
'!extensions/notebook/src/prompts/**',
@@ -194,10 +195,37 @@ const tslintBaseFilter = [
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts' // {{SQL CARBON EDIT}},
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}},
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts' // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and it's not ours
];

const sqlFilter = ['src/sql/**']; // {{SQL CARBON EDIT}}
// {{SQL CARBON EDIT}}
const sqlFilter = [
'src/sql/**',
'extensions/**',
// Ignore VS Code extensions
'!extensions/bat/**',
'!extensions/configuration-editing/**',
'!extensions/docker/**',
'!extensions/extension-editing/**',
'!extensions/git/**',
'!extensions/git-ui/**',
'!extensions/image-preview/**',
'!extensions/insights-default/**',
'!extensions/json/**',
'!extensions/json-language-features/**',
'!extensions/markdown-basics/**',
'!extensions/markdown-language-features/**',
'!extensions/merge-conflict/**',
'!extensions/powershell/**',
'!extensions/python/**',
'!extensions/r/**',
'!extensions/theme-*/**',
'!extensions/vscode-*/**',
'!extensions/xml/**',
'!extensions/xml-language-features/**',
'!extensions/yarml/**',
];

const tslintCoreFilter = [
'src/**/*.ts',
@@ -220,9 +248,16 @@ const tslintHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!src/vs/workbench/contrib/extensions/browser/extensionTipsService.ts', // {{SQL CARBON EDIT}} known formatting issue due to commenting out code
...tslintBaseFilter
];

const fileLengthFilter = filter([
'**',
'!extensions/import/*.docx',
'!extensions/admin-tool-ext-win/license/**'
], {restore: true});
const copyrightHeaderLines = [
'/*---------------------------------------------------------------------------------------------',
' * Copyright (c) Microsoft Corporation. All rights reserved.',
@@ -361,6 +396,23 @@ function hygiene(some) {
});
});

const filelength = es.through(function (file) {

const fileName = path.basename(file.relative);
const fileDir = path.dirname(file.relative);
//check the filename is < 50 characters (basename gets the filename with extension).
if (fileName.length > 50) {
console.error(`File name '${fileName}' under ${fileDir} is too long. Rename file to have less than 50 characters.`);
errorCount++;
}
if (file.relative.length > 150) {
console.error(`File path ${file.relative} exceeds acceptable file-length. Rename the path to have less than 150 characters.`);
errorCount++;
}

this.emit('data', file);
});

const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
const tslintOptions = { fix: false, formatter: 'json' };
const tsLinter = new tslint.Linter(tslintOptions);
@@ -374,11 +426,17 @@ function hygiene(some) {
let input;

if (Array.isArray(some) || typeof some === 'string' || !some) {
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
const options = { base: '.', follow: true, allowEmpty: true };
if (some) {
input = vfs.src(some, options).pipe(filter(all)); // split this up to not unnecessarily filter all a second time
} else {
input = vfs.src(all, options);
}
} else {
input = some;
}

// {{SQL CARBON EDIT}} Linting for SQL
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
const tslintSqlOptions = { fix: false, formatter: 'json' };
const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
@@ -391,6 +449,9 @@ function hygiene(some) {
const productJsonFilter = filter('product.json', { restore: true });

const result = input
.pipe(fileLengthFilter)
.pipe(filelength)
.pipe(fileLengthFilter.restore)
.pipe(filter(f => !f.stat.isDirectory()))
.pipe(productJsonFilter)
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
@@ -407,8 +468,8 @@ function hygiene(some) {
if (!process.argv.some(arg => arg === '--skip-tslint')) {
typescript = typescript.pipe(tsl);
typescript = typescript
.pipe(filter(sqlFilter))
.pipe(sqlTsl); // {{SQL CARBON EDIT}}
.pipe(filter(sqlFilter)) // {{SQL CARBON EDIT}}
.pipe(sqlTsl);
}

const javascript = result
@@ -1,79 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

const gulp = require('gulp');

// {{SQL CARBON EDIT}}
const jeditor = require('gulp-json-editor');
const product = require('../product.json');

gulp.task('mixin', function () {
// {{SQL CARBON EDIT}}
const updateUrl = process.env['SQLOPS_UPDATEURL'];
if (!updateUrl) {
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
return;
}

const quality = process.env['VSCODE_QUALITY'];

if (!quality) {
console.log('Missing VSCODE_QUALITY, skipping mixin');
return;
}

// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
let newValues = {
"nameShort": product.nameShort,
"nameLong": product.nameLong,
"applicationName": product.applicationName,
"dataFolderName": product.dataFolderName,
"win32MutexName": product.win32MutexName,
"win32DirName": product.win32DirName,
"win32NameVersion": product.win32NameVersion,
"win32RegValueName": product.win32RegValueName,
"win32AppId": product.win32AppId,
"win32x64AppId": product.win32x64AppId,
"win32UserAppId": product.win32UserAppId,
"win32x64UserAppId": product.win32x64UserAppId,
"win32AppUserModelId": product.win32AppUserModelId,
"win32ShellNameShort": product.win32ShellNameShort,
"darwinBundleIdentifier": product.darwinBundleIdentifier,
"updateUrl": updateUrl,
"quality": quality,
"extensionsGallery": {
"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
}
};

if (quality === 'insider') {
let dashSuffix = '-insiders';
let dotSuffix = '.insiders';
let displaySuffix = ' - Insiders';

newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
newValues.nameShort += dashSuffix;
newValues.nameLong += displaySuffix;
newValues.applicationName += dashSuffix;
newValues.dataFolderName += dashSuffix;
newValues.win32MutexName += dashSuffix;
newValues.win32DirName += displaySuffix;
newValues.win32NameVersion += displaySuffix;
newValues.win32RegValueName += dashSuffix;
newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
newValues.win32x64UserAppId += dashSuffix;
newValues.win32AppUserModelId += dotSuffix;
newValues.win32ShellNameShort += displaySuffix;
newValues.darwinBundleIdentifier += dotSuffix;
}

return gulp.src('./product.json')
.pipe(jeditor(newValues))
.pipe(gulp.dest('.'));
});
@@ -11,12 +11,13 @@ const es = require('event-stream');
const filter = require('gulp-filter');
const del = require('del');
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
const platform = require('service-downloader/out/platform').PlatformInformation;
const path = require('path');
const fs = require('fs');
const rollup = require('rollup');
const rollupNodeResolve = require('rollup-plugin-node-resolve');
const rollupCommonJS = require('rollup-plugin-commonjs');
const ext = require('./lib/extensions');
const task = require('./lib/task');
const glob = require('glob');
const vsce = require('vsce');
const mkdirp = require('mkdirp');

gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
@@ -97,7 +98,7 @@ const formatStagedFiles = () => {

function installService() {
let config = require('../extensions/mssql/config.json');
return platformInfo.getCurrent().then(p => {
return platform.getCurrent().then(p => {
let runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
@@ -117,82 +118,50 @@ gulp.task('install-sqltoolsservice', () => {
return installService();
});

function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/config.json');
return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}

gulp.task('install-ssmsmin', () => {
return installSsmsMin();
const config = require('../extensions/admin-tool-ext-win/config.json');
const runtime = 'Windows_64'; // admin-tool-ext is a windows only extension, and we only ship a 64 bit version, so locking the binaries as such
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});

async function rollupModule(options) {
const moduleName = options.moduleName;
try {
const inputFile = options.inputFile;
const outputDirectory = options.outputDirectory;
const root = path.dirname(__dirname);

await fs.promises.mkdir(outputDirectory, {
recursive: true
gulp.task('package-external-extensions', task.series(
task.define('bundle-external-extensions-build', () => ext.packageExternalExtensionsStream().pipe(gulp.dest('.build/external'))),
task.define('create-external-extension-vsix-build', () => {
const vsixes = glob.sync('.build/external/extensions/*/package.json').map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
}).map(element => {
const pkgJson = require(path.join(element.path, 'package.json'));
const vsixDirectory = path.join(root, '.build', 'extensions');
mkdirp.sync(vsixDirectory);
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
return vsce.createVSIX({
cwd: element.path,
packagePath: packagePath,
useYarn: true
});
});

const outputFileName = options.outputFileName;
const outputMapName = `${outputFileName}.map`;
const external = options.external || [];
return Promise.all(vsixes);
})
));

const outputFilePath = path.resolve(outputDirectory, outputFileName);
const outputMapPath = path.resolve(outputDirectory, outputMapName);

const bundle = await rollup.rollup({
input: inputFile,
plugins: [
rollupNodeResolve(),
rollupCommonJS(),
],
external,
});

const generatedBundle = await bundle.generate({
output: {
name: moduleName
},
format: 'umd',
sourcemap: true
});

const result = generatedBundle.output[0];
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);

await fs.promises.writeFile(outputFilePath, result.code);
await fs.promises.writeFile(outputMapPath, result.map);

return {
name: moduleName,
result: true
};
} catch (ex) {
return {
name: moduleName,
result: false,
exception: ex
};
}
}

module.exports = {
rollupModule
};
gulp.task('package-rebuild-extensions', task.series(
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
));
@@ -29,9 +29,8 @@ const packageJson = require('../package.json');
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const { config } = require('./lib/electron');
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
@@ -78,7 +77,6 @@ const vscodeResources = [
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
@@ -93,8 +91,7 @@ const vscodeResources = [
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
// {{SQL CARBON EDIT}}
'out-build/sql/workbench/electron-browser/splashscreen/*',
'out-build/sql/workbench/electron-browser/splashscreen/*', // {{SQL CARBON EDIT}} Start
'out-build/sql/**/*.{svg,png,cur,html}',
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
@@ -112,7 +109,8 @@ const vscodeResources = [
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
'out-build/sql/setup.js',
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
'out-build/vs/platform/auth/common/auth.css',
'!**/test/**'
];

@@ -144,73 +142,6 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
));
gulp.task(minifyVSCodeTask);

// Package

// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));

function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}

const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,

// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
};

function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});

return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}

gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));

/**
* Compute checksums for some files.
*
@@ -265,10 +196,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']));

// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions();

const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory

const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
@@ -44,7 +44,7 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));

const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })

@@ -23,7 +23,7 @@ const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
@@ -23,6 +23,13 @@ const quality = process.env['VSCODE_QUALITY'];
const builtInExtensions = quality && quality === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - END
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];

function log() {
if (ENABLE_LOGGING) {
fancyLog.apply(this, arguments);
}
}

function getExtensionPath(extension) {
return path.join(root, '.build', 'builtInExtensions', extension.name);
@@ -47,7 +54,7 @@ function isUpToDate(extension) {

function syncMarketplaceExtension(extension) {
if (isUpToDate(extension)) {
fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
return es.readArray([]);
}

@@ -56,13 +63,13 @@ function syncMarketplaceExtension(extension) {
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
}

function syncExtension(extension, controlState) {
switch (controlState) {
case 'disabled':
fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
return es.readArray([]);

case 'marketplace':
@@ -70,15 +77,15 @@ function syncExtension(extension, controlState) {

default:
if (!fs.existsSync(controlState)) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
return es.readArray([]);

} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
return es.readArray([]);
}

fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
return es.readArray([]);
}
}
@@ -97,8 +104,8 @@ function writeControlFile(control) {
}

function main() {
fancyLog('Synchronizing built-in extensions...');
fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
log('Synchronizing built-in extensions...');
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);

const control = readControlFile();
const streams = [];
@@ -44,7 +44,7 @@ function createCompile(src, build, emitError) {
const input = es.through();
const output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(bom()) // this is required to preserve BOM in test files that lose it otherwise
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())

@@ -54,7 +54,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
const input = es.through();
const output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(bom()) // this is required to preserve BOM in test files that lose it otherwise
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
@@ -2,29 +2,88 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

const fs = require('fs');
const path = require('path');
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const json = require("gulp-json-editor");
const _ = require("underscore");
const util = require("./util");
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));

const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];

return target;
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
exports.getElectronVersion = getElectronVersion;
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
exports.config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch) {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}

module.exports.getElectronVersion = getElectronVersion;

// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;

process.exit(isUpToDate ? 0 : 1);
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}
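For orientation, a minimal sketch of running the rewritten script above by hand, assuming the compiled build/lib/electron.js is invoked from the repository root; the optional argument selects the architecture and defaults to process.arch:

# Hypothetical manual run: downloads Electron into .build/electron only
# when the cached version file no longer matches the .yarnrc target.
. build/azure-pipelines/win32/exec.ps1
exec { node build/lib/electron.js x64 }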
100 build/lib/electron.ts Normal file
@@ -0,0 +1,100 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';

const electron = require('gulp-atom-electron');

const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);

export function getElectronVersion(): string {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}

const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));

function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}

export const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};

function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});

return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}

async function main(arch = process.arch): Promise<void> {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
|
||||
|
||||
if (!isUpToDate) {
|
||||
await util.rimraf(electronPath)();
|
||||
await util.streamToPromise(getElectron(arch)());
|
||||
}
|
||||
}
|
||||
|
||||
if (require.main === module) {
|
||||
main(process.argv[2]).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
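The script above is self-gating: it compares the Electron version stamped into .build/electron/version with the target pinned in .yarnrc, and only wipes and re-downloads on a mismatch. A minimal sketch of a caller (the gulp task and its wiring here are assumptions for illustration, not part of this diff):

// Hypothetical gulpfile snippet, assuming it sits next to build/lib.
import * as gulp from 'gulp';
import { getElectronVersion } from './lib/electron';

// Running `node build/lib/electron.js x64` from the repo root has the same
// effect as calling main('x64'): Electron is re-fetched into .build/electron
// only when the stamped version no longer matches the .yarnrc target.
gulp.task('electron', async () => {
    console.log(`Build targets Electron ${getElectronVersion()}`);
});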
@@ -189,9 +189,11 @@ const excludedExtensions = [
    'integration-tests'
];
// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
    // Add SQL built-in extensions here.
    // the extension will be excluded from SQLOps package and will have separate vsix packages
const externalExtensions = [
    // This is the list of SQL extensions which the source code is included in this repository, but
    // they get packaged separately. Adding extension name here, will make the build to create
    // a separate vsix package for the extension and the extension will be excluded from the main package.
    // Any extension not included here will be installed by default.
    'admin-tool-ext-win',
    'agent',
    'import',
@@ -201,7 +203,13 @@ const sqlBuiltInExtensions = [
    'schema-compare',
    'cms',
    'query-history',
    'liveshare'
    'liveshare',
    'sql-database-projects'
];
// extensions that require a rebuild since they have native parts
const rebuildExtensions = [
    'big-data-cluster',
    'mssql'
];
const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - End
@@ -214,7 +222,7 @@ function packageLocalExtensionsStream() {
    })
    .filter(({ name }) => excludedExtensions.indexOf(name) === -1)
    .filter(({ name }) => builtInExtensions.every(b => b.name !== name))
    .filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add aditional filter
    .filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} Remove external Extensions with separate package
    const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
    const localExtensions = localExtensionDescriptions.map(extension => {
        return fromLocal(extension.path)
@@ -233,66 +241,40 @@ function packageMarketplaceExtensionsStream() {
        .pipe(util2.setExecutableBit(['**/*.sh']));
}
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
const vfs = require("vinyl-fs");
function packageBuiltInExtensions() {
    const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
function packageExternalExtensionsStream() {
    const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
        .map(manifestPath => {
            const extensionPath = path.dirname(path.join(root, manifestPath));
            const extensionName = path.basename(extensionPath);
            return { name: extensionName, path: extensionPath };
        })
        .filter(({ name }) => excludedExtensions.indexOf(name) === -1)
        .filter(({ name }) => builtInExtensions.every(b => b.name !== name))
        .filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
    const visxDirectory = path.join(path.dirname(root), 'vsix');
    try {
        if (!fs.existsSync(visxDirectory)) {
            fs.mkdirSync(visxDirectory);
        }
    }
    catch (err) {
        // don't fail the build if the output directory already exists
        console.warn(err);
    }
    sqlBuiltInLocalExtensionDescriptions.forEach(element => {
        let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
        const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
        console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
        vsce.createVSIX({
            cwd: element.path,
            packagePath: packagePath,
            useYarn: true
        });
        .filter(({ name }) => externalExtensions.indexOf(name) >= 0);
    const builtExtensions = extenalExtensionDescriptions.map(extension => {
        return fromLocal(extension.path)
            .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
    });
    return es.merge(builtExtensions);
}
exports.packageBuiltInExtensions = packageBuiltInExtensions;
function packageExtensionTask(extensionName, platform, arch) {
    var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
    if (platform === 'darwin') {
        destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
    }
    else {
        destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
    }
    platform = platform || process.platform;
    return () => {
        const root = path.resolve(path.join(__dirname, '../..'));
        const localExtensionDescriptions = glob.sync('extensions/*/package.json')
            .map(manifestPath => {
                const extensionPath = path.dirname(path.join(root, manifestPath));
                const extensionName = path.basename(extensionPath);
                return { name: extensionName, path: extensionPath };
            })
            .filter(({ name }) => extensionName === name);
        const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
            return fromLocal(extension.path);
        }));
        let result = localExtensions
            .pipe(util2.skipDirectories())
            .pipe(util2.fixWin32DirectoryPermissions())
            .pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
        return result.pipe(vfs.dest(destination));
    };
}
exports.packageExtensionTask = packageExtensionTask;
exports.packageExternalExtensionsStream = packageExternalExtensionsStream;
// {{SQL CARBON EDIT}} - End
function cleanRebuildExtensions(root) {
    return Promise.all(rebuildExtensions.map(async (e) => {
        await util2.rimraf(path.join(root, e))();
    })).then();
}
exports.cleanRebuildExtensions = cleanRebuildExtensions;
function packageRebuildExtensionsStream() {
    const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
        .map(manifestPath => {
            const extensionPath = path.dirname(path.join(root, manifestPath));
            const extensionName = path.basename(extensionPath);
            return { name: extensionName, path: extensionPath };
        })
        .filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
    const builtExtensions = extenalExtensionDescriptions.map(extension => {
        return fromLocal(extension.path)
            .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
    });
    return es.merge(builtExtensions);
}
exports.packageRebuildExtensionsStream = packageRebuildExtensionsStream;
@@ -225,9 +225,11 @@ const excludedExtensions = [
];

// {{SQL CARBON EDIT}}
const sqlBuiltInExtensions = [
    // Add SQL built-in extensions here.
    // the extension will be excluded from SQLOps package and will have separate vsix packages
const externalExtensions = [
    // This is the list of SQL extensions which the source code is included in this repository, but
    // they get packaged separately. Adding extension name here, will make the build to create
    // a separate vsix package for the extension and the extension will be excluded from the main package.
    // Any extension not included here will be installed by default.
    'admin-tool-ext-win',
    'agent',
    'import',
@@ -237,7 +239,14 @@ const sqlBuiltInExtensions = [
    'schema-compare',
    'cms',
    'query-history',
    'liveshare'
    'liveshare',
    'sql-database-projects'
];

// extensions that require a rebuild since they have native parts
const rebuildExtensions = [
    'big-data-cluster',
    'mssql'
];

interface IBuiltInExtension {
@@ -261,7 +270,7 @@ export function packageLocalExtensionsStream(): NodeJS.ReadWriteStream {
    })
    .filter(({ name }) => excludedExtensions.indexOf(name) === -1)
    .filter(({ name }) => builtInExtensions.every(b => b.name !== name))
    .filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add aditional filter
    .filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} Remove external Extensions with separate package

    const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
    const localExtensions = localExtensionDescriptions.map(extension => {
@@ -283,71 +292,43 @@ export function packageMarketplaceExtensionsStream(): NodeJS.ReadWriteStream {
        .pipe(util2.setExecutableBit(['**/*.sh']));
}

// {{SQL CARBON EDIT}}
import * as _ from 'underscore';
import * as vfs from 'vinyl-fs';

export function packageBuiltInExtensions() {
    const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
    const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
        .map(manifestPath => {
            const extensionPath = path.dirname(path.join(root, manifestPath));
            const extensionName = path.basename(extensionPath);
            return { name: extensionName, path: extensionPath };
        })
        .filter(({ name }) => excludedExtensions.indexOf(name) === -1)
        .filter(({ name }) => builtInExtensions.every(b => b.name !== name))
        .filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
    const visxDirectory = path.join(path.dirname(root), 'vsix');
    try {
        if (!fs.existsSync(visxDirectory)) {
            fs.mkdirSync(visxDirectory);
        }
    } catch (err) {
        // don't fail the build if the output directory already exists
        console.warn(err);
    }
    sqlBuiltInLocalExtensionDescriptions.forEach(element => {
        let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
        const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
        console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
        vsce.createVSIX({
            cwd: element.path,
            packagePath: packagePath,
            useYarn: true
        });
        .filter(({ name }) => externalExtensions.indexOf(name) >= 0);

    const builtExtensions = extenalExtensionDescriptions.map(extension => {
        return fromLocal(extension.path)
            .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
    });
}

export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
    var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
    if (platform === 'darwin') {
        destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
    } else {
        destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
    }

    platform = platform || process.platform;

    return () => {
        const root = path.resolve(path.join(__dirname, '../..'));
        const localExtensionDescriptions = glob.sync('extensions/*/package.json')
            .map(manifestPath => {
                const extensionPath = path.dirname(path.join(root, manifestPath));
                const extensionName = path.basename(extensionPath);
                return { name: extensionName, path: extensionPath };
            })
            .filter(({ name }) => extensionName === name);

        const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
            return fromLocal(extension.path);
        }));

        let result = localExtensions
            .pipe(util2.skipDirectories())
            .pipe(util2.fixWin32DirectoryPermissions())
            .pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));

        return result.pipe(vfs.dest(destination));
    };
    return es.merge(builtExtensions);
}
// {{SQL CARBON EDIT}} - End

export function cleanRebuildExtensions(root: string): Promise<void> {
    return Promise.all(rebuildExtensions.map(async e => {
        await util2.rimraf(path.join(root, e))();
    })).then();
}

export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
    const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
        .map(manifestPath => {
            const extensionPath = path.dirname(path.join(root, manifestPath));
            const extensionName = path.basename(extensionPath);
            return { name: extensionName, path: extensionPath };
        })
        .filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);

    const builtExtensions = extenalExtensionDescriptions.map(extension => {
        return fromLocal(extension.path)
            .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
    });

    return es.merge(builtExtensions);
}
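The net effect of these hunks: the old packageBuiltInExtensions, which shelled out to vsce.createVSIX per extension, is replaced by packageExternalExtensionsStream, which reuses the same fromLocal/rename pipeline as local-extension packaging. A minimal sketch of a consumer (the task name and destination directory are assumptions for illustration, not part of this diff):

// Hypothetical gulp task, assuming the helpers compiled from build/lib/extensions.
import * as gulp from 'gulp';
import * as vfs from 'vinyl-fs';
import { packageExternalExtensionsStream } from './lib/extensions';

// Each external extension lands under extensions/<name>/ in the destination,
// ready to be packed into its own vsix by a later build step.
gulp.task('package-external-extensions', () =>
    packageExternalExtensionsStream().pipe(vfs.dest('.build/external')));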
@@ -42,6 +42,10 @@
        "name": "vs/workbench/contrib/callHierarchy",
        "project": "vscode-workbench"
    },
    {
        "name": "vs/workbench/contrib/codeActions",
        "project": "vscode-workbench"
    },
    {
        "name": "vs/workbench/contrib/comments",
        "project": "vscode-workbench"
@@ -135,7 +139,7 @@
        "project": "vscode-workbench"
    },
    {
        "name": "vs/workbench/contrib/stats",
        "name": "vs/workbench/contrib/tags",
        "project": "vscode-workbench"
    },
    {
@@ -194,6 +198,10 @@
        "name": "vs/workbench/services/actions",
        "project": "vscode-workbench"
    },
    {
        "name": "vs/workbench/services/authToken",
        "project": "vscode-workbench"
    },
    {
        "name": "vs/workbench/services/bulkEdit",
        "project": "vscode-workbench"
103
build/lib/rollup.js
Normal file
@@ -0,0 +1,103 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const rollup = require("rollup");
const path = require("path");
// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');
async function rollupModule(options) {
    const moduleName = options.moduleName;
    try {
        const inputFile = options.inputFile;
        const outputDirectory = options.outputDirectory;
        await fs.promises.mkdir(outputDirectory, {
            recursive: true
        });
        const outputFileName = options.outputFileName;
        const outputMapName = `${outputFileName}.map`;
        const external = options.external || [];
        const outputFilePath = path.resolve(outputDirectory, outputFileName);
        const outputMapPath = path.resolve(outputDirectory, outputMapName);
        const bundle = await rollup.rollup({
            input: inputFile,
            plugins: [
                nodeResolve(),
                commonjs(),
            ],
            external,
        });
        const generatedBundle = await bundle.generate({
            name: moduleName,
            format: 'umd',
            sourcemap: true
        });
        const result = generatedBundle.output[0];
        result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
        await fs.promises.writeFile(outputFilePath, result.code);
        await fs.promises.writeFile(outputMapPath, result.map);
        return {
            name: moduleName,
            result: true
        };
    }
    catch (ex) {
        return {
            name: moduleName,
            result: false,
            exception: ex
        };
    }
}
function rollupAngularSlickgrid(root) {
    return new Promise(async (resolve, reject) => {
        const result = await rollupModule({
            moduleName: 'angular2-slickgrid',
            inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
            outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
            outputFileName: 'angular2-slickgrid.umd.js'
        });
        if (!result.result) {
            return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
        }
        resolve();
    });
}
exports.rollupAngularSlickgrid = rollupAngularSlickgrid;
function rollupAngular(root) {
    return new Promise(async (resolve, reject) => {
        const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
        const tasks = modules.map((module) => {
            return rollupModule({
                moduleName: `ng.${module}`,
                inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
                outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
                outputFileName: `${module}.umd.js`,
                external: modules.map(mn => `@angular/${mn}`)
            });
        });
        // array of booleans
        const x = await Promise.all(tasks);
        const result = x.reduce((prev, current) => {
            if (!current.result) {
                prev.fails.push(current.name);
                prev.exceptions.push(current.exception);
                prev.result = false;
            }
            return prev;
        }, {
            fails: [],
            exceptions: [],
            result: true,
        });
        if (!result.result) {
            return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
        }
        resolve();
    });
}
exports.rollupAngular = rollupAngular;
125
build/lib/rollup.ts
Normal file
@@ -0,0 +1,125 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as fs from 'fs';
import * as rollup from 'rollup';
import * as path from 'path';

// getting around stupid import rules
const nodeResolve = require('rollup-plugin-node-resolve');
const commonjs = require('rollup-plugin-commonjs');

export interface IRollupOptions {
    moduleName: string;
    inputFile: string;
    outputDirectory: string;
    outputFileName: string;
    external?: string[];
}

async function rollupModule(options: IRollupOptions) {
    const moduleName = options.moduleName;
    try {
        const inputFile = options.inputFile;
        const outputDirectory = options.outputDirectory;

        await fs.promises.mkdir(outputDirectory, {
            recursive: true
        });

        const outputFileName = options.outputFileName;
        const outputMapName = `${outputFileName}.map`;
        const external = options.external || [];

        const outputFilePath = path.resolve(outputDirectory, outputFileName);
        const outputMapPath = path.resolve(outputDirectory, outputMapName);

        const bundle = await rollup.rollup({
            input: inputFile,
            plugins: [
                nodeResolve(),
                commonjs(),
            ],
            external,
        });

        const generatedBundle = await bundle.generate({
            name: moduleName,
            format: 'umd',
            sourcemap: true
        });

        const result = generatedBundle.output[0];
        result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);

        await fs.promises.writeFile(outputFilePath, result.code);
        await fs.promises.writeFile(outputMapPath, result.map);

        return {
            name: moduleName,
            result: true
        };
    } catch (ex) {
        return {
            name: moduleName,
            result: false,
            exception: ex
        };
    }
}

export function rollupAngularSlickgrid(root: string): Promise<void> {
    return new Promise(async (resolve, reject) => {
        const result = await rollupModule({
            moduleName: 'angular2-slickgrid',
            inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
            outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
            outputFileName: 'angular2-slickgrid.umd.js'
        });

        if (!result.result) {
            return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
        }
        resolve();
    });
}

export function rollupAngular(root: string): Promise<void> {
    return new Promise(async (resolve, reject) => {

        const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
        const tasks = modules.map((module) => {
            return rollupModule({
                moduleName: `ng.${module}`,
                inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
                outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
                outputFileName: `${module}.umd.js`,
                external: modules.map(mn => `@angular/${mn}`)
            });
        });

        // array of booleans
        const x = await Promise.all(tasks);

        const result = x.reduce<{ fails: string[]; exceptions: string[]; result: boolean }>((prev, current) => {
            if (!current.result) {
                prev.fails.push(current.name);
                prev.exceptions.push(current.exception);
                prev.result = false;
            }
            return prev;
        }, {
            fails: [],
            exceptions: [],
            result: true,
        });

        if (!result.result) {
            return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
        }
        resolve();
    });

}
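Both entry points resolve only after every module bundles cleanly and reject with the accumulated failures otherwise, so a caller can simply await them in sequence. A sketch of such a caller (the wrapper function is an assumption for illustration, not in this diff):

// Hypothetical build step: produce the UMD bundles consumed at runtime.
import { rollupAngular, rollupAngularSlickgrid } from './lib/rollup';

async function bundleAngularDependencies(repoRoot: string): Promise<void> {
    await rollupAngular(repoRoot);          // node_modules/@angular/<mod>/bundles/<mod>.umd.js
    await rollupAngularSlickgrid(repoRoot); // node_modules/angular2-slickgrid/out/bundles/
}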
@@ -43,7 +43,9 @@ function extractEditor(options) {
    compilerOptions.declaration = false;
    compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
    options.compilerOptions = compilerOptions;
    console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
    console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
    // Take the extra included .d.ts files from `tsconfig.monaco.json`
    options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
    let result = tss.shake(options);
    for (let fileName in result) {
        if (result.hasOwnProperty(fileName)) {
@@ -50,7 +50,10 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str

    options.compilerOptions = compilerOptions;

    console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
    console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);

    // Take the extra included .d.ts files from `tsconfig.monaco.json`
    options.typings = (<string[]>tsConfig.include).filter(includedFile => /\.d\.ts$/.test(includedFile));

    let result = tss.shake(options);
    for (let fileName in result) {
@@ -116,7 +116,7 @@ function submitAllStats(productJson, commit) {
    }
    */
    appInsights.defaultClient.trackEvent({
        name: 'monacoworkbench/packagemetrics',
        name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`,
        properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
    });
    appInsights.defaultClient.flush({
@@ -126,7 +126,7 @@ export function submitAllStats(productJson: any, commit: string): Promise<boolea
    }
    */
    appInsights.defaultClient.trackEvent({
        name: 'monacoworkbench/packagemetrics',
        name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`, // {{SQL CARBON EDIT}}
        properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
    });
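Net effect of the two hunks above: builds where productJson.quality is not 'stable' now report package metrics under the adsworkbench/packagemetrics event name, while stable builds keep the original monacoworkbench/packagemetrics name.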
@@ -25,17 +25,17 @@ function toStringShakeLevel(shakeLevel) {
    }
}
exports.toStringShakeLevel = toStringShakeLevel;
function printDiagnostics(diagnostics) {
function printDiagnostics(options, diagnostics) {
    for (const diag of diagnostics) {
        let result = '';
        if (diag.file) {
            result += `${diag.file.fileName}: `;
            result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
        }
        if (diag.file && diag.start) {
            let location = diag.file.getLineAndCharacterOfPosition(diag.start);
            result += `- ${location.line + 1},${location.character} - `;
            result += `:${location.line + 1}:${location.character}`;
        }
        result += JSON.stringify(diag.messageText);
        result += ` - ` + JSON.stringify(diag.messageText);
        console.log(result);
    }
}
@@ -44,17 +44,17 @@ function shake(options) {
    const program = languageService.getProgram();
    const globalDiagnostics = program.getGlobalDiagnostics();
    if (globalDiagnostics.length > 0) {
        printDiagnostics(globalDiagnostics);
        printDiagnostics(options, globalDiagnostics);
        throw new Error(`Compilation Errors encountered.`);
    }
    const syntacticDiagnostics = program.getSyntacticDiagnostics();
    if (syntacticDiagnostics.length > 0) {
        printDiagnostics(syntacticDiagnostics);
        printDiagnostics(options, syntacticDiagnostics);
        throw new Error(`Compilation Errors encountered.`);
    }
    const semanticDiagnostics = program.getSemanticDiagnostics();
    if (semanticDiagnostics.length > 0) {
        printDiagnostics(semanticDiagnostics);
        printDiagnostics(options, semanticDiagnostics);
        throw new Error(`Compilation Errors encountered.`);
    }
    markNodes(languageService, options);
@@ -358,7 +358,7 @@ function markNodes(languageService, options) {
    ++step;
    let node;
    if (step % 100 === 0) {
        console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
        console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
    }
    if (black_queue.length === 0) {
        for (let i = 0; i < gray_queue.length; i++) {
@@ -71,17 +71,17 @@ export interface ITreeShakingResult {
    [file: string]: string;
}

function printDiagnostics(diagnostics: ReadonlyArray<ts.Diagnostic>): void {
function printDiagnostics(options: ITreeShakingOptions, diagnostics: ReadonlyArray<ts.Diagnostic>): void {
    for (const diag of diagnostics) {
        let result = '';
        if (diag.file) {
            result += `${diag.file.fileName}: `;
            result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
        }
        if (diag.file && diag.start) {
            let location = diag.file.getLineAndCharacterOfPosition(diag.start);
            result += `- ${location.line + 1},${location.character} - `;
            result += `:${location.line + 1}:${location.character}`;
        }
        result += JSON.stringify(diag.messageText);
        result += ` - ` + JSON.stringify(diag.messageText);
        console.log(result);
    }
}
@@ -92,19 +92,19 @@ export function shake(options: ITreeShakingOptions): ITreeShakingResult {

    const globalDiagnostics = program.getGlobalDiagnostics();
    if (globalDiagnostics.length > 0) {
        printDiagnostics(globalDiagnostics);
        printDiagnostics(options, globalDiagnostics);
        throw new Error(`Compilation Errors encountered.`);
    }

    const syntacticDiagnostics = program.getSyntacticDiagnostics();
    if (syntacticDiagnostics.length > 0) {
        printDiagnostics(syntacticDiagnostics);
        printDiagnostics(options, syntacticDiagnostics);
        throw new Error(`Compilation Errors encountered.`);
    }

    const semanticDiagnostics = program.getSemanticDiagnostics();
    if (semanticDiagnostics.length > 0) {
        printDiagnostics(semanticDiagnostics);
        printDiagnostics(options, semanticDiagnostics);
        throw new Error(`Compilation Errors encountered.`);
    }

@@ -471,7 +471,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
    let node: ts.Node;

    if (step % 100 === 0) {
        console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
        console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
    }

    if (black_queue.length === 0) {
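With these changes a diagnostic now prints as an absolute path joined onto options.sourcesRoot, with a colon-separated line and column, followed by the message. An illustrative before/after, with assumed file, position, and message values:

before: vs/editor/common/model.ts: - 43,17 - "Cannot find name 'foo'."
after:  /repo/src/vs/editor/common/model.ts:43:17 - "Cannot find name 'foo'."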
@@ -48,9 +48,7 @@ class DoubleQuotedStringArgRuleWalker extends Lint.RuleWalker {
    const argText = arg.getText();
    const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE;
    if (!doubleQuotedArg) {
        const fix = [
            Lint.Replacement.replaceFromTo(arg.getStart(), arg.getWidth(), `"${arg.getText().slice(1, arg.getWidth() - 2)}"`),
        ];
        const fix = Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `"${arg.getText().slice(1, arg.getWidth() - 1)}"`);
        this.addFailure(this.createFailure(arg.getStart(), arg.getWidth(), `Argument ${this.argIndex + 1} to '${functionName}' must be double quoted.`, fix));
        return;
    }
@@ -68,9 +68,7 @@ class DoubleQuotedStringArgRuleWalker extends Lint.RuleWalker {
    const doubleQuotedArg = argText.length >= 2 && argText[0] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE && argText[argText.length - 1] === DoubleQuotedStringArgRuleWalker.DOUBLE_QUOTE;

    if (!doubleQuotedArg) {
        const fix = [
            Lint.Replacement.replaceFromTo(arg.getStart(), arg.getWidth(), `"${arg.getText().slice(1, arg.getWidth() - 2)}"`),
        ];
        const fix = Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `"${arg.getText().slice(1, arg.getWidth() - 1)}"`);
        this.addFailure(this.createFailure(
            arg.getStart(), arg.getWidth(),
            `Argument ${this.argIndex! + 1} to '${functionName}' must be double quoted.`, fix));
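The corrected fix matters because Lint.Replacement.replaceFromTo takes absolute start and end positions; the old code passed arg.getWidth() as the end position and sliced one character too short. An illustrative effect on a single-quoted argument (positions and key assumed for the example):

source  : localize('my.key', ...)
                   ^ start=9, end=17, width=8, text='my.key'
old fix : replaceFromTo(9, 8, '"my.ke"')   // width used as end; slice(1, 6) drops a character
new fix : replaceFromTo(9, 17, '"my.key"') // true span; slice(1, 7) keeps the whole key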
49
build/lib/tslint/noUnderscoreInLocalizeKeysRule.js
Normal file
@@ -0,0 +1,49 @@
"use strict";
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const ts = require("typescript");
const Lint = require("tslint");
/**
 * Implementation of the no-localize-keys-with-underscore rule which verifies that keys to the localize
 * calls don't contain underscores (_) since those break the localization process.
 */
class Rule extends Lint.Rules.AbstractRule {
    apply(sourceFile) {
        return this.applyWithWalker(new NoLocalizeKeysWithUnderscore(sourceFile, this.getOptions()));
    }
}
exports.Rule = Rule;
const signatures = [
    "localize",
    "nls.localize"
];
class NoLocalizeKeysWithUnderscore extends Lint.RuleWalker {
    constructor(file, opts) {
        super(file, opts);
    }
    visitCallExpression(node) {
        this.checkCallExpression(node);
        super.visitCallExpression(node);
    }
    checkCallExpression(node) {
        // If this isn't one of the localize functions then continue on
        const functionName = node.expression.getText();
        if (functionName && !signatures.some(s => s === functionName)) {
            return;
        }
        const arg = node && node.arguments && node.arguments.length > 0 ? node.arguments[0] : undefined; // The key is the first element
        // Ignore if the arg isn't a string - we expect the compiler to warn if that's an issue
        if (arg && ts.isStringLiteral(arg)) {
            if (arg.getText().indexOf('_') >= 0) {
                const fix = [
                    Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `${arg.getText().replace(/_/g, '.')}`),
                ];
                this.addFailure(this.createFailure(arg.getStart(), arg.getWidth(), `Keys for localize calls must not contain underscores. Use periods (.) instead.`, fix));
                return;
            }
        }
    }
}
55
build/lib/tslint/noUnderscoreInLocalizeKeysRule.ts
Normal file
@@ -0,0 +1,55 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as ts from 'typescript';
import * as Lint from 'tslint';

/**
 * Implementation of the no-localize-keys-with-underscore rule which verifies that keys to the localize
 * calls don't contain underscores (_) since those break the localization process.
 */
export class Rule extends Lint.Rules.AbstractRule {
    public apply(sourceFile: ts.SourceFile): Lint.RuleFailure[] {
        return this.applyWithWalker(new NoLocalizeKeysWithUnderscore(sourceFile, this.getOptions()));
    }
}

const signatures = [
    "localize",
    "nls.localize"
];

class NoLocalizeKeysWithUnderscore extends Lint.RuleWalker {

    constructor(file: ts.SourceFile, opts: Lint.IOptions) {
        super(file, opts);
    }

    protected visitCallExpression(node: ts.CallExpression): void {
        this.checkCallExpression(node);
        super.visitCallExpression(node);
    }

    private checkCallExpression(node: ts.CallExpression): void {
        // If this isn't one of the localize functions then continue on
        const functionName = node.expression.getText();
        if (functionName && !signatures.some(s => s === functionName)) {
            return;
        }
        const arg = node && node.arguments && node.arguments.length > 0 ? node.arguments[0] : undefined; // The key is the first element
        // Ignore if the arg isn't a string - we expect the compiler to warn if that's an issue
        if(arg && ts.isStringLiteral(arg)) {
            if (arg.getText().indexOf('_') >= 0) {
                const fix = [
                    Lint.Replacement.replaceFromTo(arg.getStart(), arg.getEnd(), `${arg.getText().replace(/_/g, '.')}`),
                ];
                this.addFailure(this.createFailure(
                    arg.getStart(), arg.getWidth(),
                    `Keys for localize calls must not contain underscores. Use periods (.) instead.`, fix));
                return;
            }
        }
    }
}
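For reference, this is the kind of call the rule rejects and the shape its auto-fix produces (the example keys below are made up):

// Flagged: the key contains underscores, which break the localization pipeline.
localize('schema_compare_title', 'Schema Compare');

// After the rule's fix (underscores swapped for periods across the literal):
localize('schema.compare.title', 'Schema Compare');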
@@ -166,22 +166,50 @@ function stripSourceMappingURL() {
}
exports.stripSourceMappingURL = stripSourceMappingURL;
function rimraf(dir) {
    let retries = 0;
    const retry = (cb) => {
        _rimraf(dir, { maxBusyTries: 1 }, (err) => {
            if (!err) {
                return cb();
            }
            if (err.code === 'ENOTEMPTY' && ++retries < 5) {
                return setTimeout(() => retry(cb), 10);
            }
            return cb(err);
        });
    };
    retry.taskName = `clean-${path.basename(dir).toLowerCase()}`;
    return retry;
    const result = () => new Promise((c, e) => {
        let retries = 0;
        const retry = () => {
            _rimraf(dir, { maxBusyTries: 1 }, (err) => {
                if (!err) {
                    return c();
                }
                if (err.code === 'ENOTEMPTY' && ++retries < 5) {
                    return setTimeout(() => retry(), 10);
                }
                return e(err);
            });
        };
        retry();
    });
    result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
    return result;
}
exports.rimraf = rimraf;
function _rreaddir(dirPath, prepend, result) {
    const entries = fs.readdirSync(dirPath, { withFileTypes: true });
    for (const entry of entries) {
        if (entry.isDirectory()) {
            _rreaddir(path.join(dirPath, entry.name), `${prepend}/${entry.name}`, result);
        }
        else {
            result.push(`${prepend}/${entry.name}`);
        }
    }
}
function rreddir(dirPath) {
    let result = [];
    _rreaddir(dirPath, '', result);
    return result;
}
exports.rreddir = rreddir;
function ensureDir(dirPath) {
    if (fs.existsSync(dirPath)) {
        return;
    }
    ensureDir(path.dirname(dirPath));
    fs.mkdirSync(dirPath);
}
exports.ensureDir = ensureDir;
function getVersion(root) {
    let version = process.env['BUILD_SOURCEVERSION'];
    if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
@@ -219,3 +247,10 @@ function versionStringToNumber(versionStr) {
    return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}
exports.versionStringToNumber = versionStringToNumber;
function streamToPromise(stream) {
    return new Promise((c, e) => {
        stream.on('error', err => e(err));
        stream.on('end', () => c());
    });
}
exports.streamToPromise = streamToPromise;
@@ -218,24 +218,54 @@ export function stripSourceMappingURL(): NodeJS.ReadWriteStream {
    return es.duplex(input, output);
}

export function rimraf(dir: string): (cb: any) => void {
    let retries = 0;
export function rimraf(dir: string): () => Promise<void> {
    const result = () => new Promise<void>((c, e) => {
        let retries = 0;

    const retry = (cb: (err?: any) => void) => {
        _rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
            if (!err) {
                return cb();
            }
        const retry = () => {
            _rimraf(dir, { maxBusyTries: 1 }, (err: any) => {
                if (!err) {
                    return c();
                }

            if (err.code === 'ENOTEMPTY' && ++retries < 5) {
                return setTimeout(() => retry(cb), 10);
            }
                if (err.code === 'ENOTEMPTY' && ++retries < 5) {
                    return setTimeout(() => retry(), 10);
                }

            return cb(err);
        });
    };
    retry.taskName = `clean-${path.basename(dir).toLowerCase()}`;
    return retry;
                return e(err);
            });
        };

        retry();
    });

    result.taskName = `clean-${path.basename(dir).toLowerCase()}`;
    return result;
}

function _rreaddir(dirPath: string, prepend: string, result: string[]): void {
    const entries = fs.readdirSync(dirPath, { withFileTypes: true });
    for (const entry of entries) {
        if (entry.isDirectory()) {
            _rreaddir(path.join(dirPath, entry.name), `${prepend}/${entry.name}`, result);
        } else {
            result.push(`${prepend}/${entry.name}`);
        }
    }
}

export function rreddir(dirPath: string): string[] {
    let result: string[] = [];
    _rreaddir(dirPath, '', result);
    return result;
}

export function ensureDir(dirPath: string): void {
    if (fs.existsSync(dirPath)) {
        return;
    }
    ensureDir(path.dirname(dirPath));
    fs.mkdirSync(dirPath);
}

export function getVersion(root: string): string | undefined {
@@ -281,3 +311,10 @@ export function versionStringToNumber(versionStr: string) {

    return parseInt(match[1], 10) * 1e4 + parseInt(match[2], 10) * 1e2 + parseInt(match[3], 10);
}

export function streamToPromise(stream: NodeJS.ReadWriteStream): Promise<void> {
    return new Promise((c, e) => {
        stream.on('error', err => e(err));
        stream.on('end', () => c());
    });
}
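rimraf thus changes its calling convention from a callback-style gulp task to a factory returning a promise, while keeping the taskName property used in build logs. A sketch of how a caller migrates (the 'out' path is illustrative):

import { rimraf } from './util';

// before: rimraf('out') returned a (cb) => void task for gulp to drive
// after:  it returns () => Promise<void>, so callers simply await it,
// as build/lib/electron.ts does with `await util.rimraf(electronPath)();`
async function cleanOut(): Promise<void> {
    await rimraf('out')();
}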
@@ -3,14 +3,7 @@
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

const es = require('event-stream');


let watch = undefined;

if (!watch) {
    watch = process.platform === 'win32' ? require('./watch-win32') : require('gulp-watch');
}
const watch = process.platform === 'win32' ? require('./watch-win32') : require('vscode-gulp-watch');

module.exports = function () {
    return watch.apply(null, arguments);
@@ -5,7 +5,8 @@
    "author": "Microsoft ",
    "private": true,
    "license": "MIT",
    "devDependencies": {
        "gulp-watch": "5.0.1"
    "devDependencies": {},
    "dependencies": {
        "vscode-gulp-watch": "^5.0.2"
    }
}
File diff suppressed because it is too large
@@ -516,7 +516,7 @@ class DeclarationResolver {
        'file.ts': fileContents
    };
    const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
    const text = service.getEmitOutput('file.ts', true).outputFiles[0].text;
    const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
    return new CacheEntry(ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5), mtime);
}
}
@@ -617,7 +617,7 @@ export class DeclarationResolver {
        'file.ts': fileContents
    };
    const service = ts.createLanguageService(new TypeScriptLanguageServiceHost({}, fileMap, {}));
    const text = service.getEmitOutput('file.ts', true).outputFiles[0].text;
    const text = service.getEmitOutput('file.ts', true, true).outputFiles[0].text;
    return new CacheEntry(
        ts.createSourceFile(fileName, text, ts.ScriptTarget.ES5),
        mtime
Some files were not shown because too many files have changed in this diff