Mirror of https://github.com/ckaczor/azuredatastudio.git, synced 2026-02-22 11:01:37 -05:00.

Compare commits (213 commits)
Commit SHA1s, oldest section of the table first:

27468f75a5 79ad848216 1a2c6f1578 17e4d3bfa3 2599fb1252 26d59b528e
093f44a1e7 290f43dbd7 fb2486a54b 7e02c16fd7 6638db1f35 f0dde491be
994a2382ad 856fec4243 d1c594cfd0 86da9852ca 4ba6a979ba 82974a2135
585c18ef4d 7ec516d851 7e970d04ca 808ce4366d 8271226487 bbb7a67bd2
698b4fce41 302e8305ef 659f392196 775a25d944 bceb766d28 36fd618ed4
99c473cdf6 88b55d0e06 184d4bbe27 4fc6f4a13e adad11c725 a1b5af0445
db4f512991 42ff30515c 62565e0577 de177c0335 d614116b63 a7ff238653
6fb120f5dd 6e8cc3aaca 18ab73cc1d d1c7370f1c 794f7a14c0 7cd2a6d6aa
febf6b9e70 7201025a15 4787d7ba5c 3de95af25c 0bf4790a64 0d9353d99e
a898c46e74 493e7087cf f5ce7fb2a5 a8818ab0df 9691fab917 a7f5741608
c34869c243 ab94b9785e ee94524ab1 6cce532ca4 7f16a4d857 255ea1945b
b38b53b658 82c60a23c0 c93ea20b75 6a4e4fc07b d358cdac1e 4f8ced1f6b
8cc60fde90 b8bc629970 9a83dfc022 d5e26527a6 c9226a07c5 d451528b36
48b2cbb0bf 39e6b9933d d08fb1aee2 5235a1d029 3135b8525b 0e9797c394
4145ecfb32 20e9b329b1 d1ccbf028f a1fc621e1b 9ef6bec960 b631530753
d9997cebfc ffee69a765 7a38943412 c970173fc0 0979ce8de6 878bcc0d92
68b2f1a8e4 8cd06f74b9 43387f0d0b f3a6fc6f88 f2bc367e78 46a8410fc5
be7c26ede5 fb3b7be9e5 78731e0c8c 6b67f27cac 56182a53d1 66e1c01793
f0039a64a7 22501a09a1 c03cce7f60 98abf4a758 c19bc54877 e17d4e96ae
b333788c3c 632ca0685e 52de2b4751 fc0c05c755 22b8ebd281 ec91d3eda0
7b31ee27d8 927120fa3b f1ca2a35ef 1760af13d1 183cb84fbc 019d5088ec
15913e5e48 e6ffb97a7b 8655044dfb f26c790736 7e553031ce 164ec41fb1
8cd9097526 134b0b32c6 de4b7af1ad f976fc9418 55059907a3 8ca0082ec4
5b50696a1b 840683e3f0 ee5dbdffb9 43f6a5576d 8a44de27e7 fa9bbd4e1e
66048f1d63 bafd9fd437 dae71c3bf4 ae8304fc33 d6ef42c8b0 131b0b93bf
82185f75d7 3769b5066f ba8c331356 80248846bb cf5297958a 22996cbce7
7563416754 02b1673c71 6ef87d7067 e3921c6d14 f9ef9d85f4 7a2c30e159
6438967202 63bf82ad84 18ab2ae799 86d6295bf0 3f306d2396 2f1f5b2376
62d7c71093 4f69ed5745 ddddf3beb4 0ae525cbd5 0520870754 8b17b77010
e2ef1f8a89 7f7052ad42 738ca479e4 34a274a7d1 b1496aa12f 30acba7921
b5c0c37a23 ef0a92d83f b364e32beb 7f51921176 efebd681b6 d635390b33
61f0d614ce 27b80804f5 df0c505452 564f78b6f6 df6b6ded33 e801a04bcf
3b1eaca58e 22a427f934 4645a8ba6b 1b88c10197 5a392dfd58 e49ff93122
399788ccc1 08fde8719d f7bef3f87b c70e7794eb 9eb438bb24 38decaea90
ade68b184d 3c702c15e2 76e8805a6b 5dc7049f8c 97f852c3d6 a8eed6114b
6864d39f85 6f47c1fcda c2c64293f5
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 2 changes)
@@ -2,7 +2,7 @@
 name: Bug report
 about: Create a report to help us improve
 title: ''
-labels: Bug
+labels: ''
 assignees: ''

 ---
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 1 line)
blank_issues_enabled: false
.github/ISSUE_TEMPLATE/feature_request.md (vendored, 2 changes)
@@ -2,7 +2,7 @@
 name: Feature request
 about: Suggest an idea for this project
 title: ''
-labels: Enhancement
+labels: ''
 assignees: ''

 ---
.github/classifier.yml (vendored, 8 changes)
@@ -2,12 +2,14 @@
   perform: true,
   alwaysRequireAssignee: false,
   labelsRequiringAssignee: [],
+  defaultLabel: 'Triage: Needed',
+  defaultAssignee: '',
   autoAssignees: {
     Area - Acquisition: [],
     Area - Azure: [],
     Area - Backup\Restore: [],
     Area - Charting\Insights: [],
-    Area - Connection: [],
+    Area - Connection: [ charles-gagnon ],
     Area - DacFX: [],
     Area - Dashboard: [],
     Area - Data Explorer: [],
@@ -15,9 +17,9 @@
     Area - Extensibility: [],
     Area - External Table: [],
     Area - Fundamentals: [],
-    Area - Language Service: [],
+    Area - Language Service: [ charles-gagnon ],
     Area - Localization: [],
-    Area - Notebooks: [],
+    Area - Notebooks: [ chlafreniere ],
     Area - Performance: [],
     Area - Query Editor: [ anthonydresser ],
     Area - Query Plan: [],
.github/stale.yml (vendored, new file, 6 lines)
{
  perform: true,
  label: 'Stale PR',
  daysSinceLastUpdate: 7,
  ignoredLabels: ['Do Not Merge']
}
.vscode/launch.json (vendored, 19 changes)
@@ -67,17 +67,16 @@
 "request": "launch",
 "name": "Launch azuredatastudio",
 "windows": {
-  "runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
-  "timeout": 45000
+  "runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
 },
 "osx": {
-  "runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
-  "timeout": 45000
+  "runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
 },
 "linux": {
-  "runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
-  "timeout": 45000
+  "runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
 },
+"port": 9222,
+"timeout": 20000,
 "env": {
   "VSCODE_EXTHOST_WILL_SEND_SOCKET": null
 },
@@ -260,6 +259,14 @@
 "Attach to Main Process"
 ]
 },
+{
+  "name": "Debug azuredatastudio Main, Renderer & Extension Host",
+  "configurations": [
+    "Launch azuredatastudio",
+    "Attach to Main Process",
+    "Attach to Extension Host"
+  ]
+},
 {
 "name": "Debug Renderer and search processes",
 "configurations": [
.vscode/settings.json (vendored, 1 change)
@@ -39,6 +39,7 @@
 ],
 "typescript.tsdk": "node_modules/typescript/lib",
 "npm.exclude": "**/extensions/**",
+"npm.packageManager": "yarn",
 "emmet.excludeLanguages": [],
 "typescript.preferences.importModuleSpecifier": "non-relative",
 "typescript.preferences.quoteStyle": "single",
.vscode/tasks.json (vendored, 6 changes)
@@ -33,15 +33,15 @@
 },
 {
 "type": "npm",
-"script": "strict-initialization-watch",
-"label": "TS - Strict Initialization",
+"script": "strict-function-types-watch",
+"label": "TS - Strict Function Types",
 "isBackground": true,
 "presentation": {
   "reveal": "never"
 },
 "problemMatcher": {
   "base": "$tsc-watch",
-  "owner": "typescript-strict-initialization",
+  "owner": "typescript-function-types",
   "applyTo": "allDocuments"
 }
 },
.yarnrc (2 changes)
@@ -1,3 +1,3 @@
 disturl "https://atom.io/download/electron"
-target "6.0.12"
+target "6.1.5"
 runtime "electron"
CHANGELOG.md (15 changes)
@@ -1,5 +1,20 @@
 # Change Log

+## Version 1.13.1
+* Release date: November 15, 2019
+* Release status: General Availability
+* Resolved [#8210 Copy/Paste results are out of order](https://github.com/microsoft/azuredatastudio/issues/8210).
+
+## Version 1.13.0
+* Release date: November 4, 2019
+* Release status: General Availability
+* General Availability release for Schema Compare and DACPAC extensions
+* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/43?closed=1).
+
+## Contributions and "thank you"
+We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
+* aspnerd for `Use selected DB for import wizard schema list` [#7878](https://github.com/microsoft/azuredatastudio/pull/7878)
+
 ## Version 1.12.2
 * Release date: October 11, 2019
 * Release status: General Availability
README.md (17 changes)
@@ -10,13 +10,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
 Platform | Link
 -- | --
-Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2105135
-Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2105134
-Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2104938
-macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2105133
-Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2105132
-Linux RPM | https://go.microsoft.com/fwlink/?linkid=2104937
-Linux DEB | https://go.microsoft.com/fwlink/?linkid=2105131
+Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2109256
+Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2109085
+Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2109255
+macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2109180
+Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2109179
+Linux RPM | https://go.microsoft.com/fwlink/?linkid=2109178
+Linux DEB | https://go.microsoft.com/fwlink/?linkid=2109254

 Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.

@@ -69,6 +69,9 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
 ## Contributions and "Thank You"
 We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
+* eulercamposbarros for `Prevent connections from moving on click (#7528)`
+* AlexFsmn for `Fixed issue where task icons got hidden if text was too long`
+* jamesrod817 for `Tempdb (#7022)`
 * dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
 * devmattrick for `Update row count as updates are received #6642`
 * mottykohn for `In Message panel onclick scroll to line #6417`
@@ -1,3 +1,7 @@
+trigger:
+- master
+- release/*
+
 jobs:
 - job: Windows
   pool:
@@ -1 +1 @@
-2019-08-30T20:24:23.714Z
+2019-12-01T02:20:58.491Z
build/azure-pipelines/common/copyArtifacts.ts (new file, 36 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as vfs from 'vinyl-fs';

const files = [
    '.build/extensions/**/*.vsix', // external extensions
    '.build/win32-x64/**/*.{exe,zip}', // windows binaries
    '.build/linux/sha256hashes.txt', // linux hashes
    '.build/linux/deb/amd64/deb/*', // linux debs
    '.build/linux/rpm/x86_64/*', // linux rpms
    '.build/linux/server/*', // linux server
    '.build/linux/archive/*', // linux archive
    '.build/docker/**', // docker images
    '.build/darwin/**', // darwin binaries
    '.build/version.json' // version information
];

async function main() {
    return new Promise((resolve, reject) => {
        const stream = vfs.src(files, { base: '.build', allowEmpty: true })
            .pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));

        stream.on('end', () => resolve());
        stream.on('error', e => reject(e));
    });
}

main().catch(err => {
    console.error(err);
    process.exit(1);
});
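This script is invoked as `node build/azure-pipelines/common/copyArtifacts.js` at the end of the createDrop.sh scripts later in this diff. A minimal standalone sketch of the same vinyl-fs pattern follows; the glob and destination directory are placeholders for illustration, not part of this commit:

```ts
// Sketch of the vinyl-fs copy pattern used by copyArtifacts.ts.
// With `base: '.build'`, paths stay relative to .build, so a file at
// '.build/linux/server/foo' lands at '<dest>/linux/server/foo'.
import * as vfs from 'vinyl-fs';

vfs.src(['.build/**/*.txt'], { base: '.build', allowEmpty: true }) // placeholder glob
    .pipe(vfs.dest('/tmp/artifact-staging')) // placeholder destination
    .on('end', () => console.log('copy complete'))
    .on('error', err => { console.error(err); process.exit(1); });
```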
build/azure-pipelines/common/createAsset.ts (new file, 132 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import { Readable } from 'stream';
import * as crypto from 'crypto';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';

interface Asset {
    platform: string;
    type: string;
    url: string;
    mooncakeUrl?: string;
    hash: string;
    sha256hash: string;
    size: number;
    supportsFastUpdate?: boolean;
}

if (process.argv.length !== 6) {
    console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
    process.exit(-1);
}

function hashStream(hashName: string, stream: Readable): Promise<string> {
    return new Promise<string>((c, e) => {
        const shasum = crypto.createHash(hashName);

        stream
            .on('data', shasum.update.bind(shasum))
            .on('error', e)
            .on('close', () => c(shasum.digest('hex')));
    });
}

async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
    const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
    return existsResult.exists;
}

async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
    const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
        contentSettings: {
            contentType: mime.lookup(file),
            cacheControl: 'max-age=31536000, public'
        }
    };

    await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}

function getEnv(name: string): string {
    const result = process.env[name];

    if (typeof result === 'undefined') {
        throw new Error('Missing env: ' + name);
    }

    return result;
}

async function main(): Promise<void> {
    const [, , platform, type, name, file] = process.argv;
    const quality = getEnv('VSCODE_QUALITY');
    const commit = getEnv('BUILD_SOURCEVERSION');

    console.log('Creating asset...');

    const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
    const size = stat.size;

    console.log('Size:', size);

    const stream = fs.createReadStream(file);
    const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);

    console.log('SHA1:', sha1hash);
    console.log('SHA256:', sha256hash);

    const blobName = commit + '/' + name;
    const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;

    const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
        .withFilter(new azure.ExponentialRetryPolicyFilter(20));

    const blobExists = await doesAssetExist(blobService, quality, blobName);

    if (blobExists) {
        console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
        return;
    }

    console.log('Uploading blobs to Azure storage...');

    await uploadBlob(blobService, quality, blobName, file);

    console.log('Blobs successfully uploaded.');

    const asset: Asset = {
        platform,
        type,
        url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
        hash: sha1hash,
        sha256hash,
        size
    };

    // Remove this if we ever need to rollback fast updates for windows
    if (/win32/.test(platform)) {
        asset.supportsFastUpdate = true;
    }

    console.log('Asset:', JSON.stringify(asset, null, '  '));

    const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
    const scripts = client.database('builds').container(quality).scripts;
    await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
}

main().then(() => {
    console.log('Asset successfully created');
    process.exit(0);
}, err => {
    console.error(err);
    process.exit(1);
});
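One detail worth noting: a single `fs.createReadStream` feeds both `hashStream('sha1', ...)` and `hashStream('sha256', ...)`, so the file is read once and each `'data'` listener sees every chunk. A minimal standalone sketch of that dual-hash pattern, with a placeholder file path:

```ts
// Compute SHA1 and SHA256 in one pass over a single read stream,
// mirroring the hashStream usage in createAsset.ts above.
import * as fs from 'fs';
import * as crypto from 'crypto';

const stream = fs.createReadStream('/tmp/example.bin'); // placeholder path
const sha1 = crypto.createHash('sha1');
const sha256 = crypto.createHash('sha256');

stream.on('data', (chunk) => {
    sha1.update(chunk); // every 'data' listener receives every chunk
    sha256.update(chunk);
});
stream.on('close', () => {
    console.log('SHA1:', sha1.digest('hex'));
    console.log('SHA256:', sha256.digest('hex'));
});
stream.on('error', (err) => {
    console.error(err);
    process.exit(1);
});
```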
build/azure-pipelines/common/createBuild.ts (new file, 60 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { CosmosClient } from '@azure/cosmos';

if (process.argv.length !== 3) {
    console.error('Usage: node createBuild.js VERSION');
    process.exit(-1);
}

function getEnv(name: string): string {
    const result = process.env[name];

    if (typeof result === 'undefined') {
        throw new Error('Missing env: ' + name);
    }

    return result;
}

async function main(): Promise<void> {
    const [, , _version] = process.argv;
    const quality = getEnv('VSCODE_QUALITY');
    const commit = getEnv('BUILD_SOURCEVERSION');
    const queuedBy = getEnv('BUILD_QUEUEDBY');
    const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
    const version = _version + (quality === 'stable' ? '' : `-${quality}`);

    console.log('Creating build...');
    console.log('Quality:', quality);
    console.log('Version:', version);
    console.log('Commit:', commit);

    const build = {
        id: commit,
        timestamp: (new Date()).getTime(),
        version,
        isReleased: false,
        sourceBranch,
        queuedBy,
        assets: [],
        updates: {}
    };

    const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
    const scripts = client.database('builds').container(quality).scripts;
    await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
}

main().then(() => {
    console.log('Build successfully created');
    process.exit(0);
}, err => {
    console.error(err);
    process.exit(1);
});
build/azure-pipelines/common/releaseBuild.ts (new file, 70 lines)
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import { CosmosClient } from '@azure/cosmos';

function getEnv(name: string): string {
    const result = process.env[name];

    if (typeof result === 'undefined') {
        throw new Error('Missing env: ' + name);
    }

    return result;
}

interface Config {
    id: string;
    frozen: boolean;
}

function createDefaultConfig(quality: string): Config {
    return {
        id: quality,
        frozen: false
    };
}

async function getConfig(client: CosmosClient, quality: string): Promise<Config> {
    const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;

    const res = await client.database('builds').container('config').items.query(query).fetchAll();

    if (res.resources.length === 0) {
        return createDefaultConfig(quality);
    }

    return res.resources[0] as Config;
}

async function main(): Promise<void> {
    const commit = getEnv('BUILD_SOURCEVERSION');
    const quality = getEnv('VSCODE_QUALITY');

    const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
    const config = await getConfig(client, quality);

    console.log('Quality config:', config);

    if (config.frozen) {
        console.log(`Skipping release because quality ${quality} is frozen.`);
        return;
    }

    console.log(`Releasing build ${commit}...`);

    const scripts = client.database('builds').container(quality).scripts;
    await scripts.storedProcedure('releaseBuild').execute('', [commit]);
}

main().then(() => {
    console.log('Build successfully released');
    process.exit(0);
}, err => {
    console.error(err);
    process.exit(1);
});
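createBuild.ts and releaseBuild.ts both use the @azure/cosmos CosmosClient rather than the legacy documentdb package, the same migration applied to the modified sync script below. A minimal sketch of the query and stored-procedure patterns they share; the endpoint and key are placeholders, while the 'builds' and 'config' names match the scripts above:

```ts
// Sketch of the @azure/cosmos patterns used by these build scripts.
import { CosmosClient } from '@azure/cosmos';

async function example(): Promise<void> {
    const client = new CosmosClient({
        endpoint: 'https://example.documents.azure.com:443/', // placeholder
        key: '<cosmos-key>' // placeholder
    });

    // fetchAll() drains every result page into `resources`.
    const res = await client.database('builds').container('config').items
        .query('SELECT TOP 1 * FROM c WHERE c.id = "stable"')
        .fetchAll();
    console.log(res.resources[0]);

    // Stored procedures hang off container.scripts, as in releaseBuild.ts;
    // 'releaseBuild' exists server-side in the real account, hypothetical here.
    await client.database('builds').container('stable').scripts
        .storedProcedure('releaseBuild').execute('', ['<commit-sha>']);
}

example().catch(err => { console.error(err); process.exit(1); });
```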
@@ -8,7 +8,7 @@
 import * as url from 'url';
 import * as azure from 'azure-storage';
 import * as mime from 'mime';
-import { DocumentClient, RetrievedDocument } from 'documentdb';
+import { CosmosClient } from '@azure/cosmos';

 function log(...args: any[]) {
     console.log(...[`[${new Date().toISOString()}]`, ...args]);
@@ -23,7 +23,7 @@
     process.exit(-1);
 }

-interface Build extends RetrievedDocument {
+interface Build {
     assets: Asset[];
 }

@@ -38,62 +38,20 @@
     supportsFastUpdate?: boolean;
 }

-function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
-    const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
-    const collection = 'dbs/builds/colls/' + quality;
-    const updateQuery = {
-        query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
-        parameters: [{ name: '@id', value: commit }]
-    };
-
-    let updateTries = 0;
-
-    function _update(): Promise<void> {
-        updateTries++;
-
-        return new Promise<void>((c, e) => {
-            client.queryDocuments(collection, updateQuery).toArray((err, results) => {
-                if (err) { return e(err); }
-                if (results.length !== 1) { return e(new Error('No documents')); }
-
-                const release = results[0];
-
-                release.assets = [
-                    ...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
-                    asset
-                ];
-
-                client.replaceDocument(release._self, release, err => {
-                    if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
-                    if (err) { return e(err); }
-
-                    log('Build successfully updated.');
-                    c();
-                });
-            });
-        });
-    }
-
-    return _update();
-}
-
 async function sync(commit: string, quality: string): Promise<void> {
     log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);

-    const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
-    const collection = `dbs/builds/colls/${quality}`;
-    const query = {
-        query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
-        parameters: [{ name: '@id', value: commit }]
-    };
-
-    const build = await new Promise<Build>((c, e) => {
-        cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
-            if (err) { return e(err); }
-            if (results.length !== 1) { return e(new Error('No documents')); }
-            c(results[0] as Build);
-        });
-    });
+    const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
+    const container = client.database('builds').container(quality);
+    const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
+    const res = await container.items.query<Build>(query, {}).fetchAll();
+
+    if (res.resources.length !== 1) {
+        throw new Error(`No builds found for ${commit}`);
+    }
+
+    const build = res.resources[0];

     log(`Found build for ${commit}, with ${build.assets.length} assets`);

@@ -140,8 +98,9 @@
     await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));

     log(`  Updating build in DB...`);
-    asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
-    await updateBuild(commit, quality, asset.platform, asset.type, asset);
+    const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
+    await container.scripts.storedProcedure('setAssetMooncakeUrl')
+        .execute('', [commit, asset.platform, asset.type, mooncakeUrl]);

     log(`  Done ✔️`);
 } catch (err) {
@@ -1,32 +1,28 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
+  inputs:
+    versionSpec: "1.x"
 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
   inputs:
     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-    vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
-- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
-  inputs:
-    versionSpec: "1.x"
 - script: |
     CHILD_CONCURRENCY=1 yarn --frozen-lockfile
   displayName: Install Dependencies
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
-  env:
-    GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-    vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - script: |
     yarn electron x64
   displayName: Download Electron
-  env:
-    GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
 - script: |
     yarn gulp hygiene --skip-tslint
   displayName: Run Hygiene Checks
@@ -60,8 +56,3 @@
   testResultsFiles: '*-results.xml'
   searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
   condition: succeededOrFailed()
-- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
-  displayName: 'Component Detection'
-  inputs:
-    alertWarningLevel: High
-    failOnAlert: true
build/azure-pipelines/darwin/createDrop.sh (new executable file, 14 lines)
#!/usr/bin/env bash
set -e
REPO="$(pwd)"

# ensure drop directories exist
mkdir -p $REPO/.build/darwin/{archive,server}

# remove pkg from archive
zip -d $REPO/.build/darwin/archive/azuredatastudio-darwin.zip "*.pkg"

# package Remote Extension Host
pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $REPO/.build/darwin/server/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd

node build/azure-pipelines/common/copyArtifacts.js
@@ -21,7 +21,7 @@

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -5,28 +5,20 @@ set -e
 zip -d ../VSCode-darwin.zip "*.pkg"

 # publish the build
-PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
-VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
-node build/azure-pipelines/common/publish.js \
-    "$VSCODE_QUALITY" \
+node build/azure-pipelines/common/createAsset.js \
     darwin \
     archive \
     "VSCode-darwin-$VSCODE_QUALITY.zip" \
-    $VERSION \
-    true \
     ../VSCode-darwin.zip

 # package Remote Extension Host
 pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd

 # publish Remote Extension Host
-node build/azure-pipelines/common/publish.js \
-    "$VSCODE_QUALITY" \
+node build/azure-pipelines/common/createAsset.js \
     server-darwin \
     archive-unsigned \
     "vscode-server-darwin.zip" \
-    $VERSION \
-    true \
     ../vscode-server-darwin.zip

 # publish hockeyapp symbols
build/azure-pipelines/darwin/sql-product-build-darwin.yml (new file, 169 lines)
steps:
- script: |
    mkdir -p .build
    echo -n $BUILD_SOURCEVERSION > .build/commit
    echo -n $VSCODE_QUALITY > .build/quality
  displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
    vstsFeed: 'BuildCache'
    platformIndependent: true
    alias: 'Compilation'

- script: |
    set -e
    exit 1
  displayName: Check RestoreCache
  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
  inputs:
    versionSpec: '10.15.3'

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: '1.x'

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: ado-secrets
    SecretsFilter: 'github-distro-mixin-password'

- script: |
    set -e

    cat << EOF > ~/.netrc
    machine github.com
    login azuredatastudio
    password $(github-distro-mixin-password)
    EOF

    git config user.email "andresse@microsoft.com"
    git config user.name "AzureDataStudio"
  displayName: Prepare tooling

- script: |
    set -e
    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
    git fetch distro
    git merge $(node -p "require('./package.json').distro")
  displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'

- script: |
    set -e
    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
  displayName: Install dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
    vstsFeed: 'BuildCache'
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    set -e
    yarn postinstall
  displayName: Run postinstall scripts
  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
    set -e
    node build/azure-pipelines/mixin
  displayName: Mix in quality

- script: |
    set -e
    yarn gulp install-sqltoolsservice
  displayName: Install sqltoolsservice

- script: |
    set -e
    yarn gulp package-rebuild-extensions
    yarn gulp vscode-darwin-min-ci
    yarn gulp vscode-reh-darwin-min-ci
    yarn gulp vscode-reh-web-darwin-min-ci
  displayName: Build
  env:
    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- script: |
    set -e
    ./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
  displayName: Run unit tests
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
    set -e
    mkdir -p .build/darwin/archive
    pushd ../azuredatastudio-darwin && zip -r -X -y $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip * && popd
  displayName: 'Archive'

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  displayName: 'ESRP CodeSigning'
  inputs:
    ConnectedServiceName: 'Code Signing'
    FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
    Pattern: 'azuredatastudio-darwin.zip'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-401337-Apple",
          "operationSetCode": "MacAppDeveloperSign",
          "parameters": [],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 20

- script: |
    set -e
    ./build/azure-pipelines/darwin/createDrop.sh
  displayName: Create Drop

- task: PublishBuildArtifacts@1
  displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
  displayName: 'Publish Test Results test-results.xml'
  inputs:
    testResultsFiles: 'test-results.xml'
    searchFolder: '$(Build.SourcesDirectory)'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishTestResults@2
  displayName: 'Publish Integration and Smoke Test Results'
  inputs:
    testResultsFiles: 'dawin-integration-tests-results.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
  inputs:
    codeCoverageTool: Cobertura
    summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
    reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  inputs:
    failOnAlert: true
build/azure-pipelines/darwin/sql-publish.ps1 (new file, 19 lines)
Param(
  [string]$sourcesDir,
  [string]$artifactsDir,
  [string]$storageKey,
  [string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit

$ZipName = "azuredatastudio-darwin.zip"
$Zip = "$artifactsDir\darwin\archive\$ZipName"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
@@ -1,3 +1,6 @@
+pool:
+  vmImage: 'Ubuntu-16.04'
+
 trigger:
   branches:
     include: ['master', 'release/*']
@@ -8,27 +11,27 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
   inputs:
-    azureSubscription: 'vscode-builds-subscription'
-    KeyVaultName: vscode
+    azureSubscription: 'azuredatastudio-adointegration'
+    KeyVaultName: ado-secrets

 - script: |
     set -e

     cat << EOF > ~/.netrc
     machine github.com
-    login vscode
+    login azuredatastudio
     password $(github-distro-mixin-password)
     EOF

-    git config user.email "vscode@microsoft.com"
-    git config user.name "VSCode"
+    git config user.email "andresse@microsoft.com"
+    git config user.name "AzureDataStudio"

-    git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
+    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
     git fetch distro

     # Push master branch into oss/master
build/azure-pipelines/docker/Dockerfile (new file, 16 lines)
#Download base image ubuntu 16.04
FROM ubuntu:16.04

# Update Software repository
RUN apt-get update

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0

ADD ./ /opt/ads-server

RUN chmod +x /opt/ads-server/server.sh && chmod +x /opt/ads-server/node

CMD ["/opt/ads-server/server.sh"]

EXPOSE 8000:8000
EXPOSE 8001:8001
@@ -7,7 +7,7 @@ pr: none
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
build/azure-pipelines/linux/Dockerfile (new file, 20 lines)
#Download base image ubuntu 16.04
FROM ubuntu:16.04

# Update Software repository
RUN apt-get update --fix-missing

RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
    libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
    libnss3 libasound2 make gcc libx11-dev fakeroot rpm

#docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN apt-key fingerprint 0EBFCD88
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
RUN apt-get update
RUN apt-get -y install docker-ce docker-ce-cli containerd.io

ADD ./xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults
@@ -9,32 +9,28 @@
   sudo service xvfb start
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
+  inputs:
+    versionSpec: "1.x"
 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
   inputs:
     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-    vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
-- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
-  inputs:
-    versionSpec: "1.x"
 - script: |
     CHILD_CONCURRENCY=1 yarn --frozen-lockfile
   displayName: Install Dependencies
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
-  env:
-    GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-    vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - script: |
     yarn electron x64
   displayName: Download Electron
-  env:
-    GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
 - script: |
     yarn gulp hygiene --skip-tslint
   displayName: Run Hygiene Checks
@@ -68,8 +64,3 @@
   testResultsFiles: '*-results.xml'
   searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
   condition: succeededOrFailed()
-- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
-  displayName: 'Component Detection'
-  inputs:
-    alertWarningLevel: High
-    failOnAlert: true
build/azure-pipelines/linux/createDrop.sh (new executable file, 37 lines)
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."

# Publish tarball
mkdir -p $REPO/.build/linux/{archive,server}
PLATFORM_LINUX="linux-x64"
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"

# create version
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
COMMIT_ID=$(git rev-parse HEAD)
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"

rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"

rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

# create docker
mkdir -p $REPO/.build/docker
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz

node build/azure-pipelines/common/copyArtifacts.js
@@ -1,40 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the Source EULA. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-'use strict';
-Object.defineProperty(exports, "__esModule", { value: true });
-const documentdb_1 = require("documentdb");
-function createDefaultConfig(quality) {
-    return {
-        id: quality,
-        frozen: false
-    };
-}
-function getConfig(quality) {
-    const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
-    const collection = 'dbs/builds/colls/config';
-    const query = {
-        query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
-        parameters: [
-            { name: '@quality', value: quality }
-        ]
-    };
-    return new Promise((c, e) => {
-        client.queryDocuments(collection, query).toArray((err, results) => {
-            if (err && err.code !== 409) {
-                return e(err);
-            }
-            c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
-        });
-    });
-}
-getConfig(process.argv[2])
-    .then(config => {
-    console.log(config.frozen);
-    process.exit(0);
-})
-    .catch(err => {
-    console.error(err);
-    process.exit(1);
-});
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -118,6 +118,32 @@ steps:
   displayName: Run integration tests
   condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

+- script: |
+    set -e
+    yarn gulp "vscode-linux-x64-build-deb"
+    yarn gulp "vscode-linux-x64-build-rpm"
+    yarn gulp "vscode-linux-x64-prepare-snap"
+  displayName: Build packages
+
+- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
+  inputs:
+    ConnectedServiceName: 'ESRP CodeSign'
+    FolderPath: '.build/linux/rpm/x86_64'
+    Pattern: '*.rpm'
+    signConfigType: inlineSignParams
+    inlineOperation: |
+      [
+        {
+          "keyCode": "CP-450779-Pgp",
+          "operationSetCode": "LinuxSign",
+          "parameters": [ ],
+          "toolName": "sign",
+          "toolVersion": "1.0"
+        }
+      ]
+    SessionTimeout: 120
+  displayName: Codesign rpm
+
 - script: |
     set -e
     AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
@@ -10,13 +10,11 @@ BUILD="$ROOT/$BUILDNAME"
 BUILD_VERSION="$(date +%s)"
 [ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
 TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
-PACKAGEJSON="$BUILD/resources/app/package.json"
-VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

 rm -rf $ROOT/code-*.tar.*
 (cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
+node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"

 # Publish Remote Extension Host
 LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
@@ -27,32 +25,28 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
 rm -rf $ROOT/vscode-server-*.tar.*
 (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
+node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"

 # Publish hockeyapp symbols
 node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"

 # Publish DEB
-yarn gulp "vscode-linux-x64-build-deb"
 PLATFORM_DEB="linux-deb-x64"
 DEB_ARCH="amd64"
 DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
 DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
+node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"

 # Publish RPM
-yarn gulp "vscode-linux-x64-build-rpm"
 PLATFORM_RPM="linux-rpm-x64"
 RPM_ARCH="x86_64"
 RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
 RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
+node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"

 # Publish Snap
-yarn gulp "vscode-linux-x64-prepare-snap"

 # Pack snap tarball artifact, in order to preserve file perms
 mkdir -p $REPO/.build/linux/snap-tarball
 SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
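In the hunks above, upstream's publish.js call sites (quality, platform, asset type, file name, version, a flag, file path) are replaced by createAsset.js, which drops the quality and version positional arguments. A minimal sketch of the new call shape, inferred only from the invocations shown here (the real createAsset.js does the actual upload):

// Hypothetical reading of createAsset.js's positional arguments; inferred from call sites only.
const [, , platform, assetType, fileName, filePath] = process.argv;

console.log(`asset: ${fileName} (${assetType}) for ${platform}, read from ${filePath}`);
// ...the real script would upload the file and record build metadata here.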
@@ -1,7 +1,7 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -43,12 +43,10 @@ steps:
     # Create snap package
     BUILD_VERSION="$(date +%s)"
     SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
-    PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
-    VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
    SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
     (cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")

     # Publish snap package
     AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
     AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
-    node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
+    node build/azure-pipelines/common/createAsset.js "linux-snap-x64" package "$SNAP_FILENAME" "$SNAP_PATH"
build/azure-pipelines/linux/sql-product-build-linux.yml (new file, 170 lines)
@@ -0,0 +1,170 @@
+steps:
+- script: |
+    mkdir -p .build
+    echo -n $BUILD_SOURCEVERSION > .build/commit
+    echo -n $VSCODE_QUALITY > .build/quality
+  displayName: Prepare cache flag
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+
+- script: |
+    set -e
+    exit 1
+  displayName: Check RestoreCache
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: NodeTool@0
+  inputs:
+    versionSpec: '10.15.1'
+
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
+  inputs:
+    versionSpec: "1.x"
+
+- task: AzureKeyVault@1
+  displayName: 'Azure Key Vault: Get Secrets'
+  inputs:
+    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
+    KeyVaultName: ado-secrets
+    SecretsFilter: 'github-distro-mixin-password'
+
+- script: |
+    set -e
+    cat << EOF > ~/.netrc
+    machine github.com
+    login azuredatastudio
+    password $(github-distro-mixin-password)
+    EOF
+
+    git config user.email "andresse@microsoft.com"
+    git config user.name "AzureDataStudio"
+  displayName: Prepare tooling
+
+- script: |
+    set -e
+    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
+    git fetch distro
+    git merge $(node -p "require('./package.json').distro")
+  displayName: Merge distro
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+
+- script: |
+    set -e
+    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
+  displayName: Install dependencies
+  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
+
+- script: |
+    set -e
+    yarn postinstall
+  displayName: Run postinstall scripts
+  condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
+
+- script: |
+    set -e
+    node build/azure-pipelines/mixin
+  displayName: Mix in quality
+
+- script: |
+    set -e
+    yarn gulp install-sqltoolsservice
+    yarn gulp install-ssmsmin
+  displayName: Install extension binaries
+
+- script: |
+    set -e
+    yarn gulp vscode-linux-x64-min-ci
+    yarn gulp vscode-reh-linux-x64-min-ci
+    yarn gulp vscode-reh-web-linux-x64-min-ci
+  displayName: Build
+  env:
+    VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
+
+- script: |
+    set -e
+    service xvfb start
+  displayName: Start xvfb
+  condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
+
+- script: |
+    set -e
+    yarn gulp package-rebuild-extensions
+    yarn gulp compile-extensions
+    yarn gulp package-external-extensions
+  displayName: Package External extensions
+
+- script: |
+    set -e
+    APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
+    APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
+    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
+    DISPLAY=:10 ./scripts/test-extensions-unit.sh
+  displayName: 'Run Stable Extension Unit Tests'
+  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
+
+- script: |
+    set -e
+    APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
+    APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
+    INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
+    DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
+  displayName: 'Run Unstable Extension Unit Tests'
+  continueOnError: true
+  condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
+
+- script: |
+    set -e
+    yarn gulp vscode-linux-x64-build-deb
+  displayName: Build Deb
+
+- script: |
+    set -e
+    yarn gulp vscode-linux-x64-build-rpm
+  displayName: Build Rpm
+
+- script: |
+    set -e
+    ./build/azure-pipelines/linux/createDrop.sh
+  displayName: Create Drop
+
+- task: PublishBuildArtifacts@1
+  displayName: 'Publish Artifact: drop'
+
+- task: PublishTestResults@2
+  displayName: 'Publish Test Results test-results.xml'
+  inputs:
+    testResultsFiles: 'test-results.xml'
+    searchFolder: '$(Build.SourcesDirectory)'
+  continueOnError: true
+  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
+
+- task: PublishCodeCoverageResults@1
+  displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
+  inputs:
+    codeCoverageTool: Cobertura
+    summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
+    reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
+  continueOnError: true
+
+- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
+  displayName: 'Component Detection'
+  inputs:
+    failOnAlert: true
build/azure-pipelines/linux/sql-publish.ps1 (new file, 36 lines)
@@ -0,0 +1,36 @@
+Param(
+    [string]$sourcesDir,
+    [string]$artifactsDir,
+    [string]$storageKey,
+    [string]$documentDbKey
+)
+
+$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
+$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
+
+$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
+$Version = $VersionJson.version
+$Quality = $VersionJson.quality
+$CommitId = $VersionJson.commit
+$Arch = "x64"
+
+# Publish tarball
+$PlatformLinux = "linux-$Arch"
+$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
+$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"
+
+node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId
+
+# Publish DEB
+$PlatformDeb = "linux-deb-$Arch"
+$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
+$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"
+
+node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId
+
+# Publish RPM
+$PlatformRpm = "linux-rpm-$Arch"
+$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
+$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"
+
+node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId
@@ -21,7 +21,7 @@ function main() {
 		return;
 	}

-	const productJsonFilter = filter('product.json', { restore: true });
+	const productJsonFilter = filter('**/product.json', { restore: true });

 	fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
 	return vfs
@@ -29,7 +29,7 @@ function main() {
 		.pipe(filter(f => !f.isDirectory()))
 		.pipe(productJsonFilter)
 		.pipe(buffer())
-		.pipe(json(o => Object.assign({}, require('../product.json'), o)))
+		.pipe(json(o => Object.assign({}, require('../../product.json'), o)))
 		.pipe(productJsonFilter.restore)
 		.pipe(es.mapSync(function (f) {
 			fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
@@ -38,4 +38,4 @@ function main() {
 		.pipe(vfs.dest('.'));
 }

 main();
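The two one-line changes above are consistent with the mixin script moving one directory deeper: the relative require of product.json gains an extra '..', and the gulp-filter pattern becomes a glob so it matches product.json at any depth in the piped stream, not just at its root. A small sketch of that glob difference, assuming only the gulp-filter package (the patterns mirror the diff; everything else is illustrative):

// gulp-filter matches against each vinyl file's relative path.
import filter = require('gulp-filter');

// Matches only a file whose relative path is exactly 'product.json'.
const rootOnly = filter('product.json', { restore: true });

// Also matches e.g. 'quality/stable/product.json' deeper in the stream.
const anyDepth = filter('**/product.json', { restore: true });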
@@ -67,7 +67,7 @@ jobs:
   - template: linux/product-build-linux-multiarch.yml

 - job: LinuxArm64
-  condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
+  condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
   pool:
     vmImage: 'Ubuntu-16.04'
   variables:
@@ -118,6 +118,7 @@ jobs:
   - Linux
   - LinuxSnap
   - LinuxArmhf
+  - LinuxArm64
   - LinuxAlpine
   - macOS
   steps:
@@ -133,6 +134,7 @@ jobs:
   - Linux
   - LinuxSnap
   - LinuxArmhf
+  - LinuxArm64
   - LinuxAlpine
   - LinuxWeb
   - macOS
@@ -12,23 +12,24 @@ steps:
     vstsFeed: 'npm-vscode'
     platformIndependent: true
     alias: 'Compilation'
+    dryRun: true

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+    versionSpec: "12.13.0"
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
     versionSpec: "1.x"
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - task: AzureKeyVault@1
   displayName: 'Azure Key Vault: Get Secrets'
   inputs:
     azureSubscription: 'vscode-builds-subscription'
     KeyVaultName: vscode
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -41,7 +42,7 @@ steps:
     git config user.email "vscode@microsoft.com"
     git config user.name "VSCode"
   displayName: Prepare tooling
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -49,33 +50,33 @@ steps:
     git fetch distro
     git merge $(node -p "require('./package.json').distro")
   displayName: Merge distro
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
   inputs:
     keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
     vstsFeed: 'npm-vscode'
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
     CHILD_CONCURRENCY=1 yarn --frozen-lockfile
   displayName: Install dependencies
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
     keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
     vstsFeed: 'npm-vscode'
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

 - script: |
     set -e
     yarn postinstall
   displayName: Run postinstall scripts
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))

 # Mixin must run before optimize, because the CSS loader will
 # inline small SVGs
@@ -83,7 +84,7 @@ steps:
     set -e
     node build/azure-pipelines/mixin
   displayName: Mix in quality
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -91,20 +92,20 @@ steps:
     yarn gulp tslint
     yarn monaco-compile-check
   displayName: Run hygiene, tslint and monaco compile checks
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

 - script: |
     set -
     ./build/azure-pipelines/common/extract-telemetry.sh
   displayName: Extract Telemetry
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
     AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
     ./build/azure-pipelines/common/publish-webview.sh
   displayName: Publish Webview
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
@@ -114,14 +115,22 @@ steps:
     yarn gulp minify-vscode-reh
     yarn gulp minify-vscode-reh-web
   displayName: Compile
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

 - script: |
     set -e
     AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
     node build/azure-pipelines/upload-sourcemaps
   displayName: Upload sourcemaps
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

+- script: |
+    set -e
+    VERSION=`node -p "require(\"./package.json\").version"`
+    AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
+    node build/azure-pipelines/common/createBuild.js $VERSION
+  displayName: Create build
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
+
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
@@ -130,4 +139,4 @@ steps:
     vstsFeed: 'npm-vscode'
     platformIndependent: true
     alias: 'Compilation'
-  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+  condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
@@ -35,9 +35,9 @@ function isValidTag(t: string) {
 		return false;
 	}

-	if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
+	if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
 		return false;
 	}

 	return true;
 }
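The isValidTag fix corrects a standard JavaScript trap: NaN compares unequal to everything, including itself, so the old `=== NaN` guard could never fire and malformed versions slipped through. A quick TypeScript illustration:

// NaN is the only value for which x === x is false, so equality checks never detect it.
console.log(parseInt('abc', 10) === NaN); // false - the old guard never triggered
console.log(NaN === NaN);                 // false
console.log(isNaN(parseInt('abc', 10)));  // true  - the corrected guard works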
@@ -9,7 +9,7 @@ pr: none
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -19,4 +19,4 @@ steps:
     (cd build ; yarn)

     AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
-    node build/azure-pipelines/common/release.js
+    node build/azure-pipelines/common/releaseBuild.js
build/azure-pipelines/sql-product-build.yml (new file, 73 lines)
@@ -0,0 +1,73 @@
+resources:
+  containers:
+  - container: linux-x64
+    image: sqltoolscontainers.azurecr.io/linux-build-agent:x64
+    endpoint: ContainerRegistry
+
+jobs:
+- job: Compile
+  pool:
+    vmImage: 'Ubuntu-16.04'
+  container: linux-x64
+  steps:
+  - template: sql-product-compile.yml
+
+- job: macOS
+  condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
+  pool:
+    vmImage: macOS 10.13
+  dependsOn:
+  - Compile
+  steps:
+  - template: darwin/sql-product-build-darwin.yml
+
+- job: Linux
+  condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
+  pool:
+    vmImage: 'Ubuntu-16.04'
+  container: linux-x64
+  dependsOn:
+  - Compile
+  steps:
+  - template: linux/sql-product-build-linux.yml
+
+- job: Windows
+  condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
+  pool:
+    vmImage: VS2017-Win2016
+  dependsOn:
+  - Compile
+  steps:
+  - template: win32/sql-product-build-win32.yml
+
+- job: Windows_Test
+  condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
+  pool:
+    name: mssqltools
+  dependsOn:
+  - Linux
+  - Windows
+  steps:
+  - template: win32/sql-product-test-win32.yml
+
+- job: Release
+  condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
+  pool:
+    vmImage: 'Ubuntu-16.04'
+  dependsOn:
+  - macOS
+  - Linux
+  - Windows
+  - Windows_Test
+  steps:
+  - template: sql-release.yml
+
+trigger: none
+pr: none
+
+schedules:
+- cron: "0 5 * * Mon-Fri"
+  displayName: Mon-Fri at 7:00
+  branches:
+    include:
+    - master
build/azure-pipelines/sql-product-compile.yml (new file, 112 lines)
@@ -0,0 +1,112 @@
+steps:
+- script: |
+    mkdir -p .build
+    echo -n $BUILD_SOURCEVERSION > .build/commit
+    echo -n $VSCODE_QUALITY > .build/quality
+  displayName: Prepare cache flag
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+
+- task: NodeTool@0
+  inputs:
+    versionSpec: "10.15.1"
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
+  inputs:
+    versionSpec: "1.x"
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: AzureKeyVault@1
+  displayName: 'Azure Key Vault: Get Secrets'
+  inputs:
+    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
+    KeyVaultName: ado-secrets
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    cat << EOF > ~/.netrc
+    machine github.com
+    login azuredatastudio
+    password $(github-distro-mixin-password)
+    EOF
+
+    git config user.email "andresse@microsoft.com"
+    git config user.name "AzureDataStudio"
+  displayName: Prepare tooling
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
+    git fetch distro
+    git merge $(node -p "require('./package.json').distro")
+  displayName: Merge distro
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    CHILD_CONCURRENCY=1 yarn --frozen-lockfile
+  displayName: Install dependencies
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
+    targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
+    vstsFeed: 'BuildCache'
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
+
+- script: |
+    set -e
+    yarn postinstall
+  displayName: Run postinstall scripts
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
+
+# Mixin must run before optimize, because the CSS loader will
+# inline small SVGs
+- script: |
+    set -e
+    node build/azure-pipelines/mixin
+  displayName: Mix in quality
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- script: |
+    set -e
+    yarn gulp hygiene --skip-tslint
+    yarn gulp tslint
+  displayName: Run hygiene, tslint
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
+
+- script: |
+    set -e
+    yarn gulp compile-build
+    yarn gulp compile-extensions-build
+    yarn gulp minify-vscode
+    yarn gulp minify-vscode-reh
+    yarn gulp minify-vscode-reh-web
+  displayName: Compile
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
+
+- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
+  inputs:
+    keyfile: 'build/.cachesalt, .build/commit, .build/quality'
+    targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
+    vstsFeed: 'BuildCache'
+    platformIndependent: true
+    alias: 'Compilation'
+  condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
build/azure-pipelines/sql-release.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
+steps:
+- script: |
+    set -e
+    echo "##vso[build.addbuildtag]Release"
+  displayName: Set For Release
@@ -1,7 +1,7 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -7,12 +7,9 @@ ROOT="$REPO/.."
 WEB_BUILD_NAME="vscode-web"
 WEB_TARBALL_FILENAME="vscode-web.tar.gz"
 WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
-BUILD="$ROOT/$WEB_BUILD_NAME"
-PACKAGEJSON="$BUILD/package.json"
-VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

 rm -rf $ROOT/vscode-web.tar.*

 (cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)

-node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "web-standalone" archive-unsigned "$WEB_TARBALL_FILENAME" "$VERSION" true "$WEB_TARBALL_PATH"
+node build/azure-pipelines/common/createAsset.js web-standalone archive-unsigned "$WEB_TARBALL_FILENAME" "$WEB_TARBALL_PATH"
@@ -1,7 +1,7 @@
 steps:
 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"
 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
   inputs:
     versionSpec: "1.x"
@@ -13,24 +13,21 @@ steps:
   inputs:
     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-    vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
 - powershell: |
     yarn --frozen-lockfile
   env:
     CHILD_CONCURRENCY: "1"
-    GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
   displayName: Install Dependencies
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
   inputs:
     keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
     targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
-    vstsFeed: '$(build-cache)' # {{SQL CARBON EDIT}} update build cache
+    vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
   condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
 - powershell: |
     yarn electron
-  env:
-    GITHUB_TOKEN: $(GITHUB_TOKEN) # {{SQL CARBON EDIT}} add github token
 - script: |
     yarn gulp hygiene --skip-tslint
   displayName: Run Hygiene Checks
@@ -64,8 +61,3 @@ steps:
     testResultsFiles: '*-results.xml'
     searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
   condition: succeededOrFailed()
-- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0 # {{SQL CARBON EDIT}} add task
-  displayName: 'Component Detection'
-  inputs:
-    alertWarningLevel: High
-    failOnAlert: true
build/azure-pipelines/win32/createDrop.ps1 (new file, 20 lines)
@@ -0,0 +1,20 @@
+. build/azure-pipelines/win32/exec.ps1
+$ErrorActionPreference = "Stop"
+
+$Arch = "x64"
+
+$Repo = "$(pwd)"
+$Root = "$Repo\.."
+$LegacyServer = "$Root\azuredatastudio-reh-win32-$Arch"
+$ServerName = "azuredatastudio-server-win32-$Arch"
+$Server = "$Root\$ServerName"
+$ServerZipLocation = "$Repo\.build\win32-$Arch\server"
+$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"
+
+# Create server archive
+New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
+$global:LASTEXITCODE = 0
+exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
+exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
+
+exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
@@ -21,7 +21,7 @@ steps:

 - task: NodeTool@0
   inputs:
-    versionSpec: "10.15.1"
+    versionSpec: "12.13.0"

 - task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
   inputs:
@@ -23,14 +23,13 @@ exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
 # get version
 $PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
 $Version = $PackageJson.version
-$Quality = "$env:VSCODE_QUALITY"

 $AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }

-exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
-exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
-exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
-exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
+exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Zip }
+exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $SystemExe }
+exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
+exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }

 # publish hockeyapp symbols
 $hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
build/azure-pipelines/win32/sql-product-build-win32.yml (new file, 278 lines)
@@ -0,0 +1,278 @@
|
|||||||
|
steps:
|
||||||
|
- powershell: |
|
||||||
|
mkdir .build -ea 0
|
||||||
|
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
|
||||||
|
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
|
||||||
|
displayName: Prepare cache flag
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
|
||||||
|
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
platformIndependent: true
|
||||||
|
alias: 'Compilation'
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exit 1
|
||||||
|
displayName: Check RestoreCache
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: "10.15.1"
|
||||||
|
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
|
inputs:
|
||||||
|
versionSpec: "1.x"
|
||||||
|
|
||||||
|
- task: UsePythonVersion@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: '2.x'
|
||||||
|
addToPath: true
|
||||||
|
|
||||||
|
- task: AzureKeyVault@1
|
||||||
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
inputs:
|
||||||
|
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||||
|
KeyVaultName: ado-secrets
|
||||||
|
SecretsFilter: 'github-distro-mixin-password'
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
"machine github.com`nlogin azuredatastudio`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
|
||||||
|
|
||||||
|
exec { git config user.email "andresse@microsoft.com" }
|
||||||
|
exec { git config user.name "AzureDataStudio" }
|
||||||
|
displayName: Prepare tooling
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||||
|
git fetch distro
|
||||||
|
git merge $(node -p "require('./package.json').distro")
|
||||||
|
displayName: Merge distro
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
$env:CHILD_CONCURRENCY="1"
|
||||||
|
exec { yarn --frozen-lockfile }
|
||||||
|
displayName: Install dependencies
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn postinstall }
|
||||||
|
displayName: Run postinstall scripts
|
||||||
|
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { node build/azure-pipelines/mixin }
|
||||||
|
displayName: Mix in quality
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn gulp "install-sqltoolsservice" }
|
||||||
|
displayName: Install sqltoolsservice
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn gulp "package-rebuild-extensions" }
|
||||||
|
exec { yarn gulp "vscode-win32-x64-min-ci" }
|
||||||
|
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
|
||||||
|
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
|
||||||
|
displayName: Build
|
||||||
|
env:
|
||||||
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { .\scripts\test-unstable.bat --build --coverage --reporter mocha-junit-reporter }
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
|
||||||
|
displayName: Run unstable tests
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
displayName: 'Sign out code'
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'Code Signing'
|
||||||
|
FolderPath: '$(agent.builddirectory)/azuredatastudio-win32-x64'
|
||||||
|
Pattern: '*.exe,*.node,resources/app/node_modules.asar.unpacked/*.dll,swiftshader/*.dll,d3dcompiler_47.dll,libGLESv2.dll,ffmpeg.dll,libEGL.dll,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,Microsoft.SqlServer.*.dll,Microsoft.Data.Tools.Sql.BatchParser.dll'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolSign",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "OpusName",
|
||||||
|
"parameterValue": "Azure Data Studio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "OpusInfo",
|
||||||
|
"parameterValue": "https://github.com/microsoft/azuredatastudio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "PageHash",
|
||||||
|
"parameterValue": "/NPH"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "FileDigest",
|
||||||
|
"parameterValue": "/fd sha256"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "TimeStamp",
|
||||||
|
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolVerify",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "VerifyAll",
|
||||||
|
"parameterValue": "/all"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 600
|
||||||
|
MaxConcurrency: 5
|
||||||
|
MaxRetryAttempts: 20
|
||||||
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn gulp "vscode-win32-x64-user-setup" }
|
||||||
|
exec { yarn gulp "vscode-win32-x64-system-setup" }
|
||||||
|
exec { yarn gulp "vscode-win32-x64-archive" }
|
||||||
|
displayName: Archive & User & System setup
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
displayName: 'Sign installers'
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'Code Signing'
|
||||||
|
FolderPath: '.build'
|
||||||
|
Pattern: '*.exe'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolSign",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "OpusName",
|
||||||
|
"parameterValue": "Azure Data Studio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "OpusInfo",
|
||||||
|
"parameterValue": "https://github.com/microsoft/azuredatastudio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "PageHash",
|
||||||
|
"parameterValue": "/NPH"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "FileDigest",
|
||||||
|
"parameterValue": "/fd sha256"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "TimeStamp",
|
||||||
|
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolVerify",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "VerifyAll",
|
||||||
|
"parameterValue": "/all"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 600
|
||||||
|
MaxConcurrency: 5
|
||||||
|
MaxRetryAttempts: 20
|
||||||
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
|
- task: ArchiveFiles@2
|
||||||
|
displayName: 'Archive build scripts source'
|
||||||
|
inputs:
|
||||||
|
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
|
||||||
|
archiveType: tar
|
||||||
|
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: build scripts source'
|
||||||
|
inputs:
|
||||||
|
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
|
||||||
|
ArtifactName: source
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
.\build\azure-pipelines\win32\createDrop.ps1
|
||||||
|
displayName: Create Drop
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: drop'
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: 'Publish Test Results test-results.xml'
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: 'test-results.xml'
|
||||||
|
searchFolder: '$(Build.SourcesDirectory)'
|
||||||
|
failTaskOnFailedTests: true
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: 'Publish Integration and Smoke Test Results'
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: '*.xml'
|
||||||
|
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
|
||||||
|
mergeTestResults: true
|
||||||
|
failTaskOnFailedTests: true
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
|
displayName: 'Component Detection'
|
||||||
|
inputs:
|
||||||
|
failOnAlert: true
|
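Note: every PowerShell step in this pipeline dot-sources build/azure-pipelines/win32/exec.ps1 and wraps native commands in exec { ... }. The helper itself is not part of this compare; a minimal sketch of the usual shape of such a wrapper (an assumption about its contents, not a verbatim copy):

function exec([scriptblock]$cmd, [string]$errorMessage = "Error executing command: " + $cmd) {
    # $ErrorActionPreference = "Stop" alone does not fail a step when a native
    # executable returns a non-zero exit code, so check $LastExitCode explicitly.
    & $cmd
    if ($LastExitCode -ne 0) {
        throw $errorMessage
    }
}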
build/azure-pipelines/win32/sql-product-test-win32.yml (new file)
@@ -0,0 +1,106 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.x"

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:CHILD_CONCURRENCY="1"
    exec { git clean -fxd }
  displayName: Clean repo

- task: DownloadPipelineArtifact@2
  inputs:
    buildType: 'current'
    targetPath: '$(Build.SourcesDirectory)\.build'
    artifactName: drop

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:CHILD_CONCURRENCY="1"
    exec { yarn --frozen-lockfile }
  displayName: Install dependencies

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Build.SourcesDirectory)\.build\win32-x64/archive/azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
  displayName: Unzip artifact

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: SqlToolsSecretStore'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: SqlToolsSecretStore
    SecretsFilter: 'ads-integration-test-azure-server,ads-integration-test-azure-server-password,ads-integration-test-azure-server-username,ads-integration-test-bdc-server,ads-integration-test-bdc-server-password,ads-integration-test-bdc-server-username,ads-integration-test-standalone-server,ads-integration-test-standalone-server-password,ads-integration-test-standalone-server-username'

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
    $AppNameShort = $AppProductJson.nameShort
    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
  displayName: Run stable tests
  env:
    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
    STANDALONE_SQL: $(ads-integration-test-standalone-server)
    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
    AZURE_SQL: $(ads-integration-test-azure-server)

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
    $AppNameShort = $AppProductJson.nameShort
    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
  displayName: Run release tests
  env:
    ADS_TEST_GREP: (.*@REL@|integration test setup)
    ADS_TEST_INVERT_GREP: 0
    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
    STANDALONE_SQL: $(ads-integration-test-standalone-server)
    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
    AZURE_SQL: $(ads-integration-test-azure-server)

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
    $AppNameShort = $AppProductJson.nameShort
    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; .\scripts\sql-test-integration-unstable.bat }
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
  displayName: Run unstable integration tests
  env:
    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
    STANDALONE_SQL: $(ads-integration-test-standalone-server)
    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
    AZURE_SQL: $(ads-integration-test-azure-server)
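Note: the three test steps above share the same bootstrap: resolve the launcher name from product.json, point the integration-test environment variables at the unzipped build, and run the batch script. A sketch of reproducing that locally (the $AppRoot path below is illustrative, not taken from this compare):

$AppRoot = "$env:TEMP\azuredatastudio-win32-x64"
# product.json carries nameShort, which is also the name of the launcher executable.
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
$env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"
$env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"
.\scripts\sql-test-integration.bat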
build/azure-pipelines/win32/sql-publish.ps1 (new file)
@@ -0,0 +1,29 @@
Param(
	[string]$sourcesDir,
	[string]$artifactsDir,
	[string]$storageKey,
	[string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$ExeName = "AzureDataStudioSetup.exe"
$SystemExe = "$artifactsDir\win32-x64\system-setup\$ExeName"
$UserExe = "$artifactsDir\win32-x64\user-setup\$ExeName"
$UserExeName = "AzureDataStudioUserSetup.exe"
$ZipName = "azuredatastudio-win32-x64.zip"
$Zip = "$artifactsDir\win32-x64\archive\$ZipName"

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit

$assetPlatform = "win32-x64"

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-archive" archive $ZipName $Version true $Zip $CommitId

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform" setup $ExeName $Version true $SystemExe $CommitId

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-user" setup $UserExeName $Version true $UserExe $CommitId
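Note: the script is fully parameterized, so a caller passes the directories and secrets explicitly. A hypothetical invocation (the variable values are illustrative; the pipeline would presumably feed the two keys from secret variables):

.\build\azure-pipelines\win32\sql-publish.ps1 `
    -sourcesDir "$env:BUILD_SOURCESDIRECTORY" `
    -artifactsDir "$env:BUILD_ARTIFACTSTAGINGDIRECTORY" `
    -storageKey $storageKey `
    -documentDbKey $documentDbKey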
@@ -1,7 +1,7 @@
 [
   {
     "name": "Microsoft.sqlservernotebook",
-    "version": "0.3.2",
+    "version": "0.3.3",
     "repo": "https://github.com/Microsoft/azuredatastudio"
   }
 ]

@@ -1,7 +1,7 @@
 [
   {
     "name": "Microsoft.sqlservernotebook",
-    "version": "0.3.2",
+    "version": "0.3.3",
     "repo": "https://github.com/Microsoft/azuredatastudio"
   }
 ]
@@ -57,7 +57,6 @@ var BUNDLED_FILE_HEADER = [
 const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);

 const extractEditorSrcTask = task.define('extract-editor-src', () => {
-	console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
 	const apiusages = monacoapi.execute().usageContent;
 	const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
 	standalone.extractEditor({
@@ -71,14 +70,6 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
 			apiusages,
 			extrausages
 		],
-		typings: [
-			'typings/lib.ie11_safe_es6.d.ts',
-			'typings/thenable.d.ts',
-			'typings/es6-promise.d.ts',
-			'typings/require-monaco.d.ts',
-			"typings/lib.es2018.promise.d.ts",
-			'vs/monaco.d.ts'
-		],
 		libs: [
 			`lib.es5.d.ts`,
 			`lib.dom.d.ts`,
@@ -86,7 +77,8 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
 		],
 		shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
 		importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
-		destRoot: path.join(root, 'out-editor-src')
+		destRoot: path.join(root, 'out-editor-src'),
+		redirects: []
 	});
 });
@@ -137,18 +129,70 @@ const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () =>
 });

 const compileEditorESMTask = task.define('compile-editor-esm', () => {
+	console.log(`Launching the TS compiler at ${path.join(__dirname, '../out-editor-esm')}...`);
+	let result;
 	if (process.platform === 'win32') {
-		const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
+		result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
 			cwd: path.join(__dirname, '../out-editor-esm')
 		});
-		console.log(result.stdout.toString());
-		console.log(result.stderr.toString());
 	} else {
-		const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
+		result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
 			cwd: path.join(__dirname, '../out-editor-esm')
 		});
-		console.log(result.stdout.toString());
-		console.log(result.stderr.toString());
 	}
+
+	console.log(result.stdout.toString());
+	console.log(result.stderr.toString());
+
+	if (result.status !== 0) {
+		console.log(`The TS Compilation failed, preparing analysis folder...`);
+		const destPath = path.join(__dirname, '../../vscode-monaco-editor-esm-analysis');
+		return util.rimraf(destPath)().then(() => {
+			fs.mkdirSync(destPath);
+
+			// initialize a new repository
+			cp.spawnSync(`git`, [`init`], {
+				cwd: destPath
+			});
+
+			// build a list of files to copy
+			const files = util.rreddir(path.join(__dirname, '../out-editor-esm'));
+
+			// copy files from src
+			for (const file of files) {
+				const srcFilePath = path.join(__dirname, '../src', file);
+				const dstFilePath = path.join(destPath, file);
+				if (fs.existsSync(srcFilePath)) {
+					util.ensureDir(path.dirname(dstFilePath));
+					const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
+					fs.writeFileSync(dstFilePath, contents);
+				}
+			}
+
+			// create an initial commit to diff against
+			cp.spawnSync(`git`, [`add`, `.`], {
+				cwd: destPath
+			});
+
+			// create the commit
+			cp.spawnSync(`git`, [`commit`, `-m`, `"original sources"`, `--no-gpg-sign`], {
+				cwd: destPath
+			});
+
+			// copy files from esm
+			for (const file of files) {
+				const srcFilePath = path.join(__dirname, '../out-editor-esm', file);
+				const dstFilePath = path.join(destPath, file);
+				if (fs.existsSync(srcFilePath)) {
+					util.ensureDir(path.dirname(dstFilePath));
+					const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
+					fs.writeFileSync(dstFilePath, contents);
+				}
+			}
+
+			console.log(`Open in VS Code the folder at '${destPath}' and you can analyze the compilation error`);
+			throw new Error('Standalone Editor compilation failed. If this is the build machine, simply launch `yarn run gulp editor-distro` on your machine to further analyze the compilation problem.');
+		});
+	}
 });
@@ -115,7 +115,8 @@ const tasks = compilations.map(function (tsconfigFile) {

 	const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
 		const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}}
-		const input = pipeline.tsProjectSrc();
+		const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
+		const input = es.merge(nonts, pipeline.tsProjectSrc());

 		return input
 			.pipe(pipeline())
@@ -124,7 +125,8 @@ const tasks = compilations.map(function (tsconfigFile) {

 	const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
 		const pipeline = createPipeline(false);
-		const input = pipeline.tsProjectSrc();
+		const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
+		const input = es.merge(nonts, pipeline.tsProjectSrc());
 		const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });

 		return watchInput
@@ -134,7 +136,8 @@ const tasks = compilations.map(function (tsconfigFile) {

 	const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
 		const pipeline = createPipeline(true, true);
-		const input = pipeline.tsProjectSrc();
+		const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
+		const input = es.merge(nonts, pipeline.tsProjectSrc());

 		return input
 			.pipe(pipeline())
@@ -125,6 +125,7 @@ const copyrightFilter = [
 	'!**/*.opts',
 	'!**/*.disabled',
 	'!**/*.code-workspace',
+	'!**/*.js.map',
 	'!**/promise-polyfill/polyfill.js',
 	'!build/**/*.init',
 	'!resources/linux/snap/snapcraft.yaml',
@@ -139,26 +140,26 @@ const copyrightFilter = [
 	// {{SQL CARBON EDIT}}
 	'!extensions/notebook/src/intellisense/text.ts',
 	'!extensions/mssql/src/hdfs/webhdfs.ts',
-	'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
-	'!src/sql/workbench/parts/notebook/common/models/url.ts',
-	'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts',
-	'!src/sql/workbench/parts/notebook/browser/models/outputProcessor.ts',
-	'!src/sql/workbench/parts/notebook/browser/models/mimemodel.ts',
-	'!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css',
+	'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
+	'!src/sql/workbench/contrib/notebook/common/models/url.ts',
+	'!src/sql/workbench/contrib/notebook/browser/models/renderMimeInterfaces.ts',
+	'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
+	'!src/sql/workbench/contrib/notebook/browser/models/mimemodel.ts',
+	'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
 	'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
 	'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
 	'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
 	'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
 	'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
 	'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
-	'!src/sql/workbench/parts/notebook/browser/outputs/sanitizer.ts',
-	'!src/sql/workbench/parts/notebook/browser/outputs/renderers.ts',
-	'!src/sql/workbench/parts/notebook/browser/outputs/registry.ts',
-	'!src/sql/workbench/parts/notebook/browser/outputs/factories.ts',
-	'!src/sql/workbench/parts/notebook/common/models/nbformat.ts',
+	'!src/sql/workbench/contrib/notebook/browser/outputs/sanitizer.ts',
+	'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
+	'!src/sql/workbench/contrib/notebook/browser/outputs/registry.ts',
+	'!src/sql/workbench/contrib/notebook/browser/outputs/factories.ts',
+	'!src/sql/workbench/contrib/notebook/common/models/nbformat.ts',
 	'!extensions/markdown-language-features/media/tomorrow.css',
 	'!src/sql/workbench/browser/modelComponents/media/highlight.css',
-	'!src/sql/workbench/parts/notebook/electron-browser/cellViews/media/highlight.css',
+	'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
 	'!extensions/mssql/sqltoolsservice/**',
 	'!extensions/import/flatfileimportservice/**',
 	'!extensions/notebook/src/prompts/**',
@@ -194,7 +195,8 @@ const tslintBaseFilter = [
 	'!extensions/**/*.test.ts',
 	'!extensions/html-language-features/server/lib/jquery.d.ts',
 	'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
-	'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts' // {{SQL CARBON EDIT}},
+	'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}},
+	'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts' // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and it's not ours
 ];

 // {{SQL CARBON EDIT}}
@@ -246,6 +248,7 @@ const tslintHygieneFilter = [
 	'src/**/*.ts',
 	'test/**/*.ts',
 	'extensions/**/*.ts',
+	'!src/vs/workbench/contrib/extensions/browser/extensionTipsService.ts', // {{SQL CARBON EDIT}} known formatting issue due to commenting out code
 	...tslintBaseFilter
 ];

@@ -423,7 +426,12 @@ function hygiene(some) {
 	let input;

 	if (Array.isArray(some) || typeof some === 'string' || !some) {
-		input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
+		const options = { base: '.', follow: true, allowEmpty: true };
+		if (some) {
+			input = vfs.src(some, options).pipe(filter(all)); // split this up to not unnecessarily filter all a second time
+		} else {
+			input = vfs.src(all, options);
+		}
 	} else {
 		input = some;
 	}
@@ -1,79 +0,0 @@
-/*---------------------------------------------------------------------------------------------
- * Copyright (c) Microsoft Corporation. All rights reserved.
- * Licensed under the Source EULA. See License.txt in the project root for license information.
- *--------------------------------------------------------------------------------------------*/
-
-'use strict';
-
-const gulp = require('gulp');
-
-// {{SQL CARBON EDIT}}
-const jeditor = require('gulp-json-editor');
-const product = require('../product.json');
-
-gulp.task('mixin', function () {
-	// {{SQL CARBON EDIT}}
-	const updateUrl = process.env['SQLOPS_UPDATEURL'];
-	if (!updateUrl) {
-		console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
-		return;
-	}
-
-	const quality = process.env['VSCODE_QUALITY'];
-
-	if (!quality) {
-		console.log('Missing VSCODE_QUALITY, skipping mixin');
-		return;
-	}
-
-	// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
-	let newValues = {
-		"nameShort": product.nameShort,
-		"nameLong": product.nameLong,
-		"applicationName": product.applicationName,
-		"dataFolderName": product.dataFolderName,
-		"win32MutexName": product.win32MutexName,
-		"win32DirName": product.win32DirName,
-		"win32NameVersion": product.win32NameVersion,
-		"win32RegValueName": product.win32RegValueName,
-		"win32AppId": product.win32AppId,
-		"win32x64AppId": product.win32x64AppId,
-		"win32UserAppId": product.win32UserAppId,
-		"win32x64UserAppId": product.win32x64UserAppId,
-		"win32AppUserModelId": product.win32AppUserModelId,
-		"win32ShellNameShort": product.win32ShellNameShort,
-		"darwinBundleIdentifier": product.darwinBundleIdentifier,
-		"updateUrl": updateUrl,
-		"quality": quality,
-		"extensionsGallery": {
-			"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
-		}
-	};
-
-	if (quality === 'insider') {
-		let dashSuffix = '-insiders';
-		let dotSuffix = '.insiders';
-		let displaySuffix = ' - Insiders';
-
-		newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
-		newValues.nameShort += dashSuffix;
-		newValues.nameLong += displaySuffix;
-		newValues.applicationName += dashSuffix;
-		newValues.dataFolderName += dashSuffix;
-		newValues.win32MutexName += dashSuffix;
-		newValues.win32DirName += displaySuffix;
-		newValues.win32NameVersion += displaySuffix;
-		newValues.win32RegValueName += dashSuffix;
-		newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
-		newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
-		newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
-		newValues.win32x64UserAppId += dashSuffix;
-		newValues.win32AppUserModelId += dotSuffix;
-		newValues.win32ShellNameShort += displaySuffix;
-		newValues.darwinBundleIdentifier += dotSuffix;
-	}
-
-	return gulp.src('./product.json')
-		.pipe(jeditor(newValues))
-		.pipe(gulp.dest('.'));
-});
@@ -11,8 +11,13 @@ const es = require('event-stream');
 const filter = require('gulp-filter');
 const del = require('del');
 const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
-const platformInfo = require('service-downloader/out/platform').PlatformInformation;
+const platform = require('service-downloader/out/platform').PlatformInformation;
 const path = require('path');
+const ext = require('./lib/extensions');
+const task = require('./lib/task');
+const glob = require('glob');
+const vsce = require('vsce');
+const mkdirp = require('mkdirp');

 gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
 gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
@@ -93,7 +98,7 @@ const formatStagedFiles = () => {

 function installService() {
 	let config = require('../extensions/mssql/config.json');
-	return platformInfo.getCurrent().then(p => {
+	return platform.getCurrent().then(p => {
 		let runtime = p.runtimeId;
 		// fix path since it won't be correct
 		config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
@@ -113,25 +118,50 @@ gulp.task('install-sqltoolsservice', () => {
 	return installService();
 });

-function installSsmsMin() {
-	const config = require('../extensions/admin-tool-ext-win/config.json');
-	return platformInfo.getCurrent().then(p => {
-		const runtime = p.runtimeId;
-		// fix path since it won't be correct
-		config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
-		var installer = new serviceDownloader(config);
-		const serviceInstallFolder = installer.getInstallDirectory(runtime);
-		const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
-		console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
-		return del(serviceCleanupFolder + '/*').then(() => {
-			console.log('Installing the service. Install folder: ' + serviceInstallFolder);
-			return installer.installService(runtime);
-		}, delError => {
-			console.log('failed to delete the install folder error: ' + delError);
-		});
-	});
-}
-
 gulp.task('install-ssmsmin', () => {
-	return installSsmsMin();
+	const config = require('../extensions/admin-tool-ext-win/config.json');
+	const runtime = 'Windows_64'; // admin-tool-ext is a windows only extension, and we only ship a 64 bit version, so locking the binaries as such
+	// fix path since it won't be correct
+	config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
+	var installer = new serviceDownloader(config);
+	const serviceInstallFolder = installer.getInstallDirectory(runtime);
+	const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
+	console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
+	return del(serviceCleanupFolder + '/*').then(() => {
+		console.log('Installing the service. Install folder: ' + serviceInstallFolder);
+		return installer.installService(runtime);
+	}, delError => {
+		console.log('failed to delete the install folder error: ' + delError);
+	});
 });
+
+const root = path.dirname(__dirname);
+
+gulp.task('package-external-extensions', task.series(
+	task.define('bundle-external-extensions-build', () => ext.packageExternalExtensionsStream().pipe(gulp.dest('.build/external'))),
+	task.define('create-external-extension-vsix-build', () => {
+		const vsixes = glob.sync('.build/external/extensions/*/package.json').map(manifestPath => {
+			const extensionPath = path.dirname(path.join(root, manifestPath));
+			const extensionName = path.basename(extensionPath);
+			return { name: extensionName, path: extensionPath };
+		}).map(element => {
+			const pkgJson = require(path.join(element.path, 'package.json'));
+			const vsixDirectory = path.join(root, '.build', 'extensions');
+			mkdirp.sync(vsixDirectory);
+			const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
+			console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
+			return vsce.createVSIX({
+				cwd: element.path,
+				packagePath: packagePath,
+				useYarn: true
+			});
+		});
+
+		return Promise.all(vsixes);
+	})
+));
+
+gulp.task('package-rebuild-extensions', task.series(
+	task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
+	task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
+));
@@ -29,7 +29,6 @@ const packageJson = require('../package.json');
 const product = require('../product.json');
 const crypto = require('crypto');
 const i18n = require('./lib/i18n');
-const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
 const deps = require('./dependencies');
 const { config } = require('./lib/electron');
 const createAsar = require('./lib/asar').createAsar;
@@ -92,8 +91,7 @@ const vscodeResources = [
 	'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
 	'out-build/vs/code/electron-browser/issue/issueReporter.js',
 	'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
-	// {{SQL CARBON EDIT}}
-	'out-build/sql/workbench/electron-browser/splashscreen/*',
+	'out-build/sql/workbench/electron-browser/splashscreen/*', // {{SQL CARBON EDIT}} Start
 	'out-build/sql/**/*.{svg,png,cur,html}',
 	'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
 	'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
@@ -111,7 +109,8 @@ const vscodeResources = [
 	'out-build/sql/media/objectTypes/*.svg',
 	'out-build/sql/media/icons/*.svg',
 	'out-build/sql/workbench/parts/notebook/media/**/*.svg',
-	'out-build/sql/setup.js',
+	'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
+	'out-build/vs/platform/auth/common/auth.css',
 	'!**/test/**'
 ];

@@ -197,9 +196,6 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
 		.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
 		.pipe(util.setExecutableBit(['**/*.sh']));

-	// {{SQL CARBON EDIT}}
-	ext.packageBuiltInExtensions();
-
 	const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory

 	const sources = es.merge(src, extensions)
@@ -23,7 +23,7 @@ const repoPath = path.dirname(__dirname);
 // {{SQL CARBON EDIT}}
 const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
 const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
-const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
+const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.zip`);
 const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
 const issPath = path.join(__dirname, 'win32', 'code.iss');
 const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
@@ -23,6 +23,13 @@ const quality = process.env['VSCODE_QUALITY'];
 const builtInExtensions = quality && quality === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
 // {{SQL CARBON EDIT}} - END
 const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
+const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
+
+function log() {
+	if (ENABLE_LOGGING) {
+		fancyLog.apply(this, arguments);
+	}
+}

 function getExtensionPath(extension) {
 	return path.join(root, '.build', 'builtInExtensions', extension.name);
@@ -47,7 +54,7 @@ function isUpToDate(extension) {

 function syncMarketplaceExtension(extension) {
 	if (isUpToDate(extension)) {
-		fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
+		log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
 		return es.readArray([]);
 	}

@@ -56,13 +63,13 @@ function syncMarketplaceExtension(extension) {
 	return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
 		.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
 		.pipe(vfs.dest('.build/builtInExtensions'))
-		.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
+		.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
 }

 function syncExtension(extension, controlState) {
 	switch (controlState) {
 		case 'disabled':
-			fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
+			log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
 			return es.readArray([]);

 		case 'marketplace':
@@ -70,15 +77,15 @@ function syncExtension(extension, controlState) {

 		default:
 			if (!fs.existsSync(controlState)) {
-				fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
+				log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
 				return es.readArray([]);

 			} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
-				fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
+				log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
 				return es.readArray([]);
 			}

-			fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
+			log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
 			return es.readArray([]);
 	}
 }
@@ -97,8 +104,8 @@ function writeControlFile(control) {
 }

 function main() {
-	fancyLog('Syncronizing built-in extensions...');
-	fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
+	log('Syncronizing built-in extensions...');
+	log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);

 	const control = readControlFile();
 	const streams = [];
@@ -44,7 +44,7 @@ function createCompile(src, build, emitError) {
 	const input = es.through();
 	const output = input
 		.pipe(utf8Filter)
-		.pipe(bom())
+		.pipe(bom()) // this is required to preserve BOM in test files that lose it otherwise
 		.pipe(utf8Filter.restore)
 		.pipe(tsFilter)
 		.pipe(util.loadSourcemaps())
@@ -54,7 +54,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean) {
 	const input = es.through();
 	const output = input
 		.pipe(utf8Filter)
-		.pipe(bom())
+		.pipe(bom()) // this is required to preserve BOM in test files that lose it otherwise
 		.pipe(utf8Filter.restore)
 		.pipe(tsFilter)
 		.pipe(util.loadSourcemaps())
@@ -189,9 +189,11 @@ const excludedExtensions = [
 	'integration-tests'
 ];
 // {{SQL CARBON EDIT}}
-const sqlBuiltInExtensions = [
-	// Add SQL built-in extensions here.
-	// the extension will be excluded from SQLOps package and will have separate vsix packages
+const externalExtensions = [
+	// This is the list of SQL extensions whose source code is included in this repository, but
+	// they get packaged separately. Adding an extension name here makes the build create
+	// a separate vsix package for the extension, and the extension is excluded from the main package.
+	// Any extension not included here will be installed by default.
 	'admin-tool-ext-win',
 	'agent',
 	'import',
@@ -201,7 +203,13 @@ const sqlBuiltInExtensions = [
 	'schema-compare',
 	'cms',
 	'query-history',
-	'liveshare'
+	'liveshare',
+	'sql-database-projects'
+];
+// extensions that require a rebuild since they have native parts
+const rebuildExtensions = [
+	'big-data-cluster',
+	'mssql'
 ];
 const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
 // {{SQL CARBON EDIT}} - End
@@ -214,7 +222,7 @@ function packageLocalExtensionsStream() {
 	})
 		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
 		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
-		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add additional filter
+		.filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} remove external extensions that are packaged separately
 	const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
 	const localExtensions = localExtensionDescriptions.map(extension => {
 		return fromLocal(extension.path)
@@ -233,66 +241,40 @@ function packageMarketplaceExtensionsStream() {
 		.pipe(util2.setExecutableBit(['**/*.sh']));
 }
 exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
-const vfs = require("vinyl-fs");
-function packageBuiltInExtensions() {
-	const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
+function packageExternalExtensionsStream() {
+	const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
 		.map(manifestPath => {
 			const extensionPath = path.dirname(path.join(root, manifestPath));
 			const extensionName = path.basename(extensionPath);
 			return { name: extensionName, path: extensionPath };
 		})
-		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
-		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
-		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
-	const visxDirectory = path.join(path.dirname(root), 'vsix');
-	try {
-		if (!fs.existsSync(visxDirectory)) {
-			fs.mkdirSync(visxDirectory);
-		}
-	}
-	catch (err) {
-		// don't fail the build if the output directory already exists
-		console.warn(err);
-	}
-	sqlBuiltInLocalExtensionDescriptions.forEach(element => {
-		let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
-		const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
-		console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
-		vsce.createVSIX({
-			cwd: element.path,
-			packagePath: packagePath,
-			useYarn: true
-		});
-	});
-}
-exports.packageBuiltInExtensions = packageBuiltInExtensions;
-function packageExtensionTask(extensionName, platform, arch) {
-	var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
-	if (platform === 'darwin') {
-		destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
-	}
-	else {
-		destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
-	}
-	platform = platform || process.platform;
-	return () => {
-		const root = path.resolve(path.join(__dirname, '../..'));
-		const localExtensionDescriptions = glob.sync('extensions/*/package.json')
-			.map(manifestPath => {
-				const extensionPath = path.dirname(path.join(root, manifestPath));
-				const extensionName = path.basename(extensionPath);
-				return { name: extensionName, path: extensionPath };
-			})
-			.filter(({ name }) => extensionName === name);
-		const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
-			return fromLocal(extension.path);
-		}));
-		let result = localExtensions
-			.pipe(util2.skipDirectories())
-			.pipe(util2.fixWin32DirectoryPermissions())
-			.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
-		return result.pipe(vfs.dest(destination));
-	};
-}
-exports.packageExtensionTask = packageExtensionTask;
+		.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
+	const builtExtensions = extenalExtensionDescriptions.map(extension => {
+		return fromLocal(extension.path)
+			.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
+	});
+	return es.merge(builtExtensions);
+}
+exports.packageExternalExtensionsStream = packageExternalExtensionsStream;
 // {{SQL CARBON EDIT}} - End
+function cleanRebuildExtensions(root) {
+	return Promise.all(rebuildExtensions.map(async (e) => {
+		await util2.rimraf(path.join(root, e))();
+	})).then();
+}
+exports.cleanRebuildExtensions = cleanRebuildExtensions;
+function packageRebuildExtensionsStream() {
+	const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
+		.map(manifestPath => {
+			const extensionPath = path.dirname(path.join(root, manifestPath));
+			const extensionName = path.basename(extensionPath);
+			return { name: extensionName, path: extensionPath };
+		})
+		.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
+	const builtExtensions = extenalExtensionDescriptions.map(extension => {
+		return fromLocal(extension.path)
+			.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
+	});
+	return es.merge(builtExtensions);
+}
+exports.packageRebuildExtensionsStream = packageRebuildExtensionsStream;
@@ -225,9 +225,11 @@ const excludedExtensions = [
|
|||||||
];
|
];
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const sqlBuiltInExtensions = [
|
const externalExtensions = [
|
||||||
// Add SQL built-in extensions here.
|
// This is the list of SQL extensions which the source code is included in this repository, but
|
||||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
// they get packaged separately. Adding extension name here, will make the build to create
|
||||||
|
// a separate vsix package for the extension and the extension will be excluded from the main package.
|
||||||
|
// Any extension not included here will be installed by default.
|
||||||
'admin-tool-ext-win',
|
'admin-tool-ext-win',
|
||||||
'agent',
|
'agent',
|
||||||
'import',
|
'import',
|
||||||
@@ -237,7 +239,14 @@ const sqlBuiltInExtensions = [
|
|||||||
'schema-compare',
|
'schema-compare',
|
||||||
'cms',
|
'cms',
|
||||||
'query-history',
|
'query-history',
|
||||||
'liveshare'
|
'liveshare',
|
||||||
|
'sql-database-projects'
|
||||||
|
];
|
||||||
|
|
||||||
|
// extensions that require a rebuild since they have native parts
|
||||||
|
const rebuildExtensions = [
|
||||||
|
'big-data-cluster',
|
||||||
|
'mssql'
|
||||||
];
|
];
|
||||||
|
|
||||||
interface IBuiltInExtension {
|
interface IBuiltInExtension {
|
||||||
@@ -261,7 +270,7 @@ export function packageLocalExtensionsStream(): NodeJS.ReadWriteStream {
 		})
 		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
 		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
-		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add aditional filter
+		.filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} Remove external Extensions with separate package
 
 	const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
 	const localExtensions = localExtensionDescriptions.map(extension => {
@@ -283,71 +292,43 @@ export function packageMarketplaceExtensionsStream(): NodeJS.ReadWriteStream {
 		.pipe(util2.setExecutableBit(['**/*.sh']));
 }
 
-// {{SQL CARBON EDIT}}
-import * as _ from 'underscore';
-import * as vfs from 'vinyl-fs';
-
-export function packageBuiltInExtensions() {
-	const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
+export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
+	const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
 		.map(manifestPath => {
 			const extensionPath = path.dirname(path.join(root, manifestPath));
 			const extensionName = path.basename(extensionPath);
 			return { name: extensionName, path: extensionPath };
 		})
-		.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
-		.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
-		.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
-	const visxDirectory = path.join(path.dirname(root), 'vsix');
-	try {
-		if (!fs.existsSync(visxDirectory)) {
-			fs.mkdirSync(visxDirectory);
-		}
-	} catch (err) {
-		// don't fail the build if the output directory already exists
-		console.warn(err);
-	}
-	sqlBuiltInLocalExtensionDescriptions.forEach(element => {
-		let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
-		const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
-		console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
-		vsce.createVSIX({
-			cwd: element.path,
-			packagePath: packagePath,
-			useYarn: true
-		});
+		.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
+	const builtExtensions = extenalExtensionDescriptions.map(extension => {
+		return fromLocal(extension.path)
+			.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
 	});
-}
 
-export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
-	var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
-	if (platform === 'darwin') {
-		destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
-	} else {
-		destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
-	}
-
-	platform = platform || process.platform;
-
-	return () => {
-		const root = path.resolve(path.join(__dirname, '../..'));
-		const localExtensionDescriptions = glob.sync('extensions/*/package.json')
-			.map(manifestPath => {
-				const extensionPath = path.dirname(path.join(root, manifestPath));
-				const extensionName = path.basename(extensionPath);
-				return { name: extensionName, path: extensionPath };
-			})
-			.filter(({ name }) => extensionName === name);
-
-		const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
-			return fromLocal(extension.path);
-		}));
-
-		let result = localExtensions
-			.pipe(util2.skipDirectories())
-			.pipe(util2.fixWin32DirectoryPermissions())
-			.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
-
-		return result.pipe(vfs.dest(destination));
-	};
+	return es.merge(builtExtensions);
 }
 // {{SQL CARBON EDIT}} - End
+
+export function cleanRebuildExtensions(root: string): Promise<void> {
+	return Promise.all(rebuildExtensions.map(async e => {
+		await util2.rimraf(path.join(root, e))();
+	})).then();
+}
+
+export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
+	const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
+		.map(manifestPath => {
+			const extensionPath = path.dirname(path.join(root, manifestPath));
+			const extensionName = path.basename(extensionPath);
+			return { name: extensionName, path: extensionPath };
+		})
+		.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
+
+	const builtExtensions = extenalExtensionDescriptions.map(extension => {
+		return fromLocal(extension.path)
+			.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
+	});
+
+	return es.merge(builtExtensions);
+}
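The new stream can be consumed like any other gulp/vinyl stream. A minimal usage sketch follows (the task name, destination folder, and import path are assumptions, not from this commit); because each file is renamed under extensions/<name>/..., a single dest() call lays out every external extension side by side:

// Sketch: writing the external-extension stream to disk.
import * as gulp from 'gulp';
import * as vfs from 'vinyl-fs';
import { packageExternalExtensionsStream } from './lib/extensions'; // assumed path

gulp.task('package-external-extensions', () => {
	return packageExternalExtensionsStream()
		.pipe(vfs.dest('.build/external'));
});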
@@ -42,6 +42,10 @@
 		"name": "vs/workbench/contrib/callHierarchy",
 		"project": "vscode-workbench"
 	},
+	{
+		"name": "vs/workbench/contrib/codeActions",
+		"project": "vscode-workbench"
+	},
 	{
 		"name": "vs/workbench/contrib/comments",
 		"project": "vscode-workbench"
@@ -135,7 +139,7 @@
 		"project": "vscode-workbench"
 	},
 	{
-		"name": "vs/workbench/contrib/stats",
+		"name": "vs/workbench/contrib/tags",
 		"project": "vscode-workbench"
 	},
 	{
@@ -194,6 +198,10 @@
 		"name": "vs/workbench/services/actions",
 		"project": "vscode-workbench"
 	},
+	{
+		"name": "vs/workbench/services/authToken",
+		"project": "vscode-workbench"
+	},
 	{
 		"name": "vs/workbench/services/bulkEdit",
 		"project": "vscode-workbench"
@@ -43,7 +43,9 @@ function extractEditor(options) {
     compilerOptions.declaration = false;
     compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
     options.compilerOptions = compilerOptions;
-    console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
+    console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
+    // Take the extra included .d.ts files from `tsconfig.monaco.json`
+    options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
     let result = tss.shake(options);
     for (let fileName in result) {
         if (result.hasOwnProperty(fileName)) {
@@ -50,7 +50,10 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
 
 	options.compilerOptions = compilerOptions;
 
-	console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
+	console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
 
+	// Take the extra included .d.ts files from `tsconfig.monaco.json`
+	options.typings = (<string[]>tsConfig.include).filter(includedFile => /\.d\.ts$/.test(includedFile));
+
 	let result = tss.shake(options);
 	for (let fileName in result) {
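The added lines seed the tree shaker's ambient typings from tsconfig.monaco.json: every entry of its `include` array that ends in `.d.ts` is copied into `options.typings`. A small sketch of the filter in isolation (the file names are hypothetical):

// Hypothetical include array; only the .d.ts entries survive the filter.
const tsConfig = { include: ['typings/thenable.d.ts', 'typings/require.d.ts', 'vs/editor/editor.main.ts'] };
const typings = (tsConfig.include as string[]).filter(includedFile => /\.d\.ts$/.test(includedFile));
console.log(typings); // ['typings/thenable.d.ts', 'typings/require.d.ts']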
@@ -116,7 +116,7 @@ function submitAllStats(productJson, commit) {
     }
     */
     appInsights.defaultClient.trackEvent({
-        name: 'monacoworkbench/packagemetrics',
+        name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`,
         properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
     });
     appInsights.defaultClient.flush({
@@ -126,7 +126,7 @@ export function submitAllStats(productJson: any, commit: string): Promise<boolea
 	}
 	*/
 	appInsights.defaultClient.trackEvent({
-		name: 'monacoworkbench/packagemetrics',
+		name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`, // {{SQL CARBON EDIT}}
 		properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
 	});
 
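Both the compiled and source versions of this change compute the telemetry event name from the product quality, so non-stable builds report under a separate bucket. Extracted as a standalone function for clarity (the function name is ours, not the commit's):

// The template the diff introduces, shown in isolation.
function packageMetricsEventName(productJson: { quality?: string }): string {
	return `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`;
}
console.log(packageMetricsEventName({ quality: 'insider' })); // adsworkbench/packagemetrics
console.log(packageMetricsEventName({ quality: 'stable' }));  // monacoworkbench/packagemetrics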
@@ -25,17 +25,17 @@ function toStringShakeLevel(shakeLevel) {
     }
 }
 exports.toStringShakeLevel = toStringShakeLevel;
-function printDiagnostics(diagnostics) {
+function printDiagnostics(options, diagnostics) {
     for (const diag of diagnostics) {
         let result = '';
         if (diag.file) {
-            result += `${diag.file.fileName}: `;
+            result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
         }
         if (diag.file && diag.start) {
            let location = diag.file.getLineAndCharacterOfPosition(diag.start);
-            result += `- ${location.line + 1},${location.character} - `;
+            result += `:${location.line + 1}:${location.character}`;
         }
-        result += JSON.stringify(diag.messageText);
+        result += ` - ` + JSON.stringify(diag.messageText);
         console.log(result);
     }
 }
@@ -44,17 +44,17 @@ function shake(options) {
     const program = languageService.getProgram();
     const globalDiagnostics = program.getGlobalDiagnostics();
     if (globalDiagnostics.length > 0) {
-        printDiagnostics(globalDiagnostics);
+        printDiagnostics(options, globalDiagnostics);
         throw new Error(`Compilation Errors encountered.`);
     }
     const syntacticDiagnostics = program.getSyntacticDiagnostics();
     if (syntacticDiagnostics.length > 0) {
-        printDiagnostics(syntacticDiagnostics);
+        printDiagnostics(options, syntacticDiagnostics);
         throw new Error(`Compilation Errors encountered.`);
     }
     const semanticDiagnostics = program.getSemanticDiagnostics();
     if (semanticDiagnostics.length > 0) {
-        printDiagnostics(semanticDiagnostics);
+        printDiagnostics(options, semanticDiagnostics);
         throw new Error(`Compilation Errors encountered.`);
     }
     markNodes(languageService, options);
@@ -358,7 +358,7 @@ function markNodes(languageService, options) {
         ++step;
         let node;
         if (step % 100 === 0) {
-            console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
+            console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
         }
         if (black_queue.length === 0) {
             for (let i = 0; i < gray_queue.length; i++) {
@@ -71,17 +71,17 @@ export interface ITreeShakingResult {
 	[file: string]: string;
 }
 
-function printDiagnostics(diagnostics: ReadonlyArray<ts.Diagnostic>): void {
+function printDiagnostics(options: ITreeShakingOptions, diagnostics: ReadonlyArray<ts.Diagnostic>): void {
 	for (const diag of diagnostics) {
 		let result = '';
 		if (diag.file) {
-			result += `${diag.file.fileName}: `;
+			result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
 		}
 		if (diag.file && diag.start) {
 			let location = diag.file.getLineAndCharacterOfPosition(diag.start);
-			result += `- ${location.line + 1},${location.character} - `;
+			result += `:${location.line + 1}:${location.character}`;
 		}
-		result += JSON.stringify(diag.messageText);
+		result += ` - ` + JSON.stringify(diag.messageText);
 		console.log(result);
 	}
 }
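The reworked printDiagnostics output is a file:line:column prefix followed by the message, a shape that terminals and editors can parse as a clickable location. With illustrative values (the path, position, and message below are assumptions):

import * as path from 'path';

const sourcesRoot = '/repo/src';              // assumed
const fileName = 'vs/editor/foo.ts';          // assumed
const location = { line: 12, character: 34 }; // zero-based line from the TS API
const messageText = "Cannot find name 'bar'.";

// old format: vs/editor/foo.ts: - 13,34 - "Cannot find name 'bar'."
// new format:
const result = `${path.join(sourcesRoot, fileName)}:${location.line + 1}:${location.character} - ${JSON.stringify(messageText)}`;
console.log(result); // /repo/src/vs/editor/foo.ts:13:34 - "Cannot find name 'bar'."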
@@ -92,19 +92,19 @@ export function shake(options: ITreeShakingOptions): ITreeShakingResult {
 
 	const globalDiagnostics = program.getGlobalDiagnostics();
 	if (globalDiagnostics.length > 0) {
-		printDiagnostics(globalDiagnostics);
+		printDiagnostics(options, globalDiagnostics);
 		throw new Error(`Compilation Errors encountered.`);
 	}
 
 	const syntacticDiagnostics = program.getSyntacticDiagnostics();
 	if (syntacticDiagnostics.length > 0) {
-		printDiagnostics(syntacticDiagnostics);
+		printDiagnostics(options, syntacticDiagnostics);
 		throw new Error(`Compilation Errors encountered.`);
 	}
 
 	const semanticDiagnostics = program.getSemanticDiagnostics();
 	if (semanticDiagnostics.length > 0) {
-		printDiagnostics(semanticDiagnostics);
+		printDiagnostics(options, semanticDiagnostics);
 		throw new Error(`Compilation Errors encountered.`);
 	}
 
@@ -471,7 +471,7 @@ function markNodes(languageService: ts.LanguageService, options: ITreeShakingOpt
 		let node: ts.Node;
 
 		if (step % 100 === 0) {
-			console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
+			console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
 		}
 
 		if (black_queue.length === 0) {
@@ -185,6 +185,31 @@ function rimraf(dir) {
     return result;
 }
 exports.rimraf = rimraf;
+function _rreaddir(dirPath, prepend, result) {
+    const entries = fs.readdirSync(dirPath, { withFileTypes: true });
+    for (const entry of entries) {
+        if (entry.isDirectory()) {
+            _rreaddir(path.join(dirPath, entry.name), `${prepend}/${entry.name}`, result);
+        }
+        else {
+            result.push(`${prepend}/${entry.name}`);
+        }
+    }
+}
+function rreddir(dirPath) {
+    let result = [];
+    _rreaddir(dirPath, '', result);
+    return result;
+}
+exports.rreddir = rreddir;
+function ensureDir(dirPath) {
+    if (fs.existsSync(dirPath)) {
+        return;
+    }
+    ensureDir(path.dirname(dirPath));
+    fs.mkdirSync(dirPath);
+}
+exports.ensureDir = ensureDir;
 function getVersion(root) {
     let version = process.env['BUILD_SOURCEVERSION'];
     if (!version || !/^[0-9a-f]{40}$/i.test(version)) {
@@ -243,6 +243,31 @@ export function rimraf(dir: string): () => Promise<void> {
 	return result;
 }
 
+function _rreaddir(dirPath: string, prepend: string, result: string[]): void {
+	const entries = fs.readdirSync(dirPath, { withFileTypes: true });
+	for (const entry of entries) {
+		if (entry.isDirectory()) {
+			_rreaddir(path.join(dirPath, entry.name), `${prepend}/${entry.name}`, result);
+		} else {
+			result.push(`${prepend}/${entry.name}`);
+		}
+	}
+}
+
+export function rreddir(dirPath: string): string[] {
+	let result: string[] = [];
+	_rreaddir(dirPath, '', result);
+	return result;
+}
+
+export function ensureDir(dirPath: string): void {
+	if (fs.existsSync(dirPath)) {
+		return;
+	}
+	ensureDir(path.dirname(dirPath));
+	fs.mkdirSync(dirPath);
+}
+
 export function getVersion(root: string): string | undefined {
 	let version = process.env['BUILD_SOURCEVERSION'];
 
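Usage sketch for the two new helpers (the import path is an assumption). Note that rreddir returns file paths prefixed with '/', relative to the directory it was handed, and ensureDir is a recursive mkdir, equivalent in spirit to fs.mkdirSync(p, { recursive: true }):

import { ensureDir, rreddir } from './util'; // assumed path

ensureDir('.build/out/nested');      // creates .build, .build/out, .build/out/nested as needed
const files = rreddir('.build/out'); // e.g. ['/a.txt', '/nested/b.txt'] - files only; directories are recursed into
console.log(files);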
@@ -3,14 +3,7 @@
  * Licensed under the Source EULA. See License.txt in the project root for license information.
  *--------------------------------------------------------------------------------------------*/
 
-const es = require('event-stream');
+const watch = process.platform === 'win32' ? require('./watch-win32') : require('vscode-gulp-watch');
 
-
-let watch = undefined;
-
-if (!watch) {
-	watch = process.platform === 'win32' ? require('./watch-win32') : require('gulp-watch');
-}
-
 module.exports = function () {
 	return watch.apply(null, arguments);
@@ -5,7 +5,8 @@
 	"author": "Microsoft ",
 	"private": true,
 	"license": "MIT",
-	"devDependencies": {
-		"gulp-watch": "5.0.1"
+	"devDependencies": {},
+	"dependencies": {
+		"vscode-gulp-watch": "^5.0.2"
 	}
 }
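The watch shim keeps its call-through signature, so existing callers are unaffected by the swap from the unmaintained gulp-watch to the vscode-gulp-watch fork. A hypothetical caller (the require path and glob are assumptions):

// Hypothetical usage; the shim simply forwards its arguments to the watcher.
const watch = require('./build/lib/watch'); // assumed path
watch('src/**/*.ts')
	.on('data', (file: any) => console.log('changed:', file.relative));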
File diff suppressed because it is too large.
@@ -43,7 +43,7 @@ declare namespace monaco {
 }
 
 declare namespace monaco.editor {
+#include(vs/editor/browser/widget/diffNavigator): IDiffNavigator
 #includeAll(vs/editor/standalone/browser/standaloneEditor;modes.=>languages.;editorCommon.=>):
 #include(vs/editor/standalone/common/standaloneThemeService): BuiltinTheme, IStandaloneThemeData, IColors
 #include(vs/editor/common/modes/supports/tokenization): ITokenThemeRule
@@ -2,38 +2,17 @@
 // This file is adding references to various symbols which should not be removed via tree shaking
 
 import { ServiceIdentifier } from './vs/platform/instantiation/common/instantiation';
-import { IContextViewService } from './vs/platform/contextview/browser/contextView';
-import { IHighlight } from './vs/base/parts/quickopen/browser/quickOpenModel';
-import { IWorkspaceContextService } from './vs/platform/workspace/common/workspace';
-import { IEnvironmentService } from './vs/platform/environment/common/environment';
-import { CountBadge } from './vs/base/browser/ui/countBadge/countBadge';
-import { SimpleWorkerClient, create as create1 } from './vs/base/common/worker/simpleWorker';
+import { create as create1 } from './vs/base/common/worker/simpleWorker';
 import { create as create2 } from './vs/editor/common/services/editorSimpleWorker';
-import { QuickOpenWidget } from './vs/base/parts/quickopen/browser/quickOpenWidget';
-import { WorkbenchAsyncDataTree } from './vs/platform/list/browser/listService';
 import { SyncDescriptor0, SyncDescriptor1, SyncDescriptor2, SyncDescriptor3, SyncDescriptor4, SyncDescriptor5, SyncDescriptor6, SyncDescriptor7, SyncDescriptor8 } from './vs/platform/instantiation/common/descriptors';
-import { DiffNavigator } from './vs/editor/browser/widget/diffNavigator';
-import { DocumentRangeFormattingEditProvider } from './vs/editor/common/modes';
 import * as editorAPI from './vs/editor/editor.api';
 
 (function () {
 	var a: any;
 	var b: any;
-	a = (<IContextViewService>b).layout; // IContextViewProvider
-	a = (<IWorkspaceContextService>b).getWorkspaceFolder; // IWorkspaceFolderProvider
-	a = (<IWorkspaceContextService>b).getWorkspace; // IWorkspaceFolderProvider
-	a = (<CountBadge>b).style; // IThemable
-	a = (<QuickOpenWidget>b).style; // IThemable
-	a = (<WorkbenchAsyncDataTree<any,any>>b).style; // IThemable
-	a = (<IEnvironmentService>b).userHome; // IUserHomeProvider
-	a = (<DiffNavigator>b).previous; // IDiffNavigator
 	a = (<ServiceIdentifier<any>>b).type;
-	a = (<IHighlight>b).start;
-	a = (<IHighlight>b).end;
-	a = (<SimpleWorkerClient<any, any>>b).getProxyObject; // IWorkerClient
 	a = create1;
 	a = create2;
-	a = (<DocumentRangeFormattingEditProvider>b).extensionId;
 
 	// injection madness
 	a = (<SyncDescriptor0<any>>b).ctor;
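For context, this file relies on a reference-keeping idiom: an assignment to a throwaway variable inside an IIFE touches a property, so the tree shaker marks the declaring symbol reachable even though nothing real imports it. In miniature (the interface here is invented for illustration):

interface IKeepMe { layout(): void; } // illustrative stand-in

(function () {
	var a: any;
	var b: any;
	a = (<IKeepMe>b).layout; // the property reference keeps IKeepMe.layout from being shaken away
})();

The deletions above simply drop references whose backing symbols were removed or are now kept alive elsewhere (e.g. the IDiffNavigator #include added to the monaco.d.ts recipe).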
@@ -73,48 +73,3 @@ yarnInstall(`build`); // node modules required for build
 yarnInstall('test/automation'); // node modules required for smoketest
 yarnInstall('test/smoke'); // node modules required for smoketest
 yarnInstallBuildDependencies(); // node modules for watching, specific to host node version, not electron
-
-// Remove the windows process tree typings as this causes duplicate identifier errors in tsc builds
-const processTreeDts = path.join('node_modules', 'windows-process-tree', 'typings', 'windows-process-tree.d.ts');
-if (fs.existsSync(processTreeDts)) {
-	console.log('Removing windows-process-tree.d.ts');
-	fs.unlinkSync(processTreeDts);
-}
-
-function getInstalledVersion(packageName, cwd) {
-	const opts = {};
-	if (cwd) {
-		opts.cwd = cwd;
-	}
-
-	const result = cp.spawnSync(yarn, ['list', '--pattern', packageName], opts);
-	const stdout = result.stdout.toString();
-	const match = stdout.match(new RegExp(packageName + '@(\\S+)'));
-	if (!match || !match[1]) {
-		throw new Error('Unexpected output from yarn list: ' + stdout);
-	}
-
-	return match[1];
-}
-
-function assertSameVersionsBetweenFolders(packageName, otherFolder) {
-	const baseVersion = getInstalledVersion(packageName);
-	const otherVersion = getInstalledVersion(packageName, otherFolder);
-
-	if (baseVersion !== otherVersion) {
-		throw new Error(`Mismatched versions installed for ${packageName}: root has ${baseVersion}, ./${otherFolder} has ${otherVersion}. These should be the same!`);
-	}
-}
-
-// Check that modules in both the base package.json and remote/ have the same version installed
-const requireSameVersionsInRemote = [
-	'xterm',
-	'xterm-addon-search',
-	'xterm-addon-web-links',
-	'node-pty',
-	'vscode-ripgrep'
-];
-
-requireSameVersionsInRemote.forEach(packageName => {
-	assertSameVersionsBetweenFolders(packageName, 'remote');
-});
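For reference, the deleted version-sync check extracted installed versions from `yarn list` output with a regular expression. A condensed sketch of that parsing (the sample output line is invented):

const packageName = 'xterm';
const stdout = '└─ xterm@4.3.0\n'; // invented sample of `yarn list --pattern xterm` output
const match = stdout.match(new RegExp(packageName + '@(\\S+)'));
console.log(match && match[1]); // '4.3.0'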
@@ -7,8 +7,8 @@ let err = false;
 
 const majorNodeVersion = parseInt(/^(\d+)\./.exec(process.versions.node)[1]);
 
-if (majorNodeVersion < 8 || majorNodeVersion >= 11) {
-	console.error('\033[1;31m*** Please use node >=8 and <11.\033[0;0m');
+if (majorNodeVersion < 10 || majorNodeVersion >= 13) {
+	console.error('\033[1;31m*** Please use node >=10 and <=12.\033[0;0m');
 	err = true;
 }
 
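The guard now accepts Node 10, 11, and 12. As an aside, the `\033` escapes in this file are octal and are rejected in strict-mode JavaScript; an equivalent formulation with hex escapes (a sketch, not part of the commit) would be:

const majorNodeVersion = parseInt(/^(\d+)\./.exec(process.versions.node)![1]);
if (majorNodeVersion < 10 || majorNodeVersion >= 13) {
	console.error('\x1b[1;31m*** Please use node >=10 and <=12.\x1b[0;0m');
}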
@@ -8,11 +8,11 @@ const path = require('path');
 const fs = require('fs');
 
 const rootPath = path.dirname(path.dirname(path.dirname(__dirname)));
-const vscodePath = path.join(rootPath, 'vscode');
-const distroPath = path.join(rootPath, 'vscode-distro');
+const vscodePath = path.join(rootPath, 'azuredatastudio'); // {{SQL CARBON EDIT}} replace vscode
+const distroPath = path.join(rootPath, 'azuredatastudio-release'); // {{SQL CARBON EDIT}} replace vscode
 const commit = cp.execSync('git rev-parse HEAD', { cwd: distroPath, encoding: 'utf8' }).trim();
 const packageJsonPath = path.join(vscodePath, 'package.json');
 const packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
 
 packageJson.distro = commit;
 fs.writeFileSync(packageJsonPath, JSON.stringify(packageJson, null, 2));
@@ -6,7 +6,7 @@
 		"@types/ansi-colors": "^3.2.0",
 		"@types/azure": "0.9.19",
 		"@types/debounce": "^1.0.0",
-		"@types/documentdb": "1.10.2",
+		"@types/documentdb": "^1.10.5",
 		"@types/fancy-log": "^1.3.0",
 		"@types/glob": "^7.1.1",
 		"@types/gulp": "^4.0.5",
@@ -44,10 +44,10 @@
 		"rollup": "^1.20.3",
 		"rollup-plugin-commonjs": "^10.1.0",
 		"rollup-plugin-node-resolve": "^5.2.0",
+		"service-downloader": "github:anthonydresser/service-downloader#0.1.7",
 		"terser": "4.3.8",
 		"tslint": "^5.9.1",
-		"service-downloader": "github:anthonydresser/service-downloader#0.1.7",
-		"typescript": "3.7.0-dev.20191017",
+		"typescript": "3.7.3",
 		"vsce": "1.48.0",
 		"vscode-telemetry-extractor": "^1.5.4",
 		"xml2js": "^0.4.17"
@@ -57,5 +57,8 @@
 		"watch": "tsc -p tsconfig.build.json --watch",
 		"postinstall": "npm run compile",
 		"npmCheckJs": "tsc --noEmit"
+	},
+	"dependencies": {
+		"@azure/cosmos": "^3.4.0"
 	}
 }
@@ -5,4 +5,5 @@ AssociateWithFiles=%1 als Editor f
 AddToPath=Zu PATH hinzufügen (nach dem Neustart verfügbar)
 RunAfter=%1 nach der Installation ausführen
 Other=Andere:
 SourceFile=%1-Quelldatei
+OpenWithCodeContextMenu=Mit %1 öffnen
@@ -5,4 +5,5 @@ AssociateWithFiles=Register %1 as an editor for supported file types
 AddToPath=Add to PATH (requires shell restart)
 RunAfter=Run %1 after installation
 Other=Other:
 SourceFile=%1 Source File
+OpenWithCodeContextMenu=Open with %1
@@ -5,4 +5,5 @@ AssociateWithFiles=Registrar %1 como editor para tipos de archivo admitidos
 AddToPath=Agregar a PATH (disponible después de reiniciar)
 RunAfter=Ejecutar %1 después de la instalación
 Other=Otros:
 SourceFile=Archivo de origen %1
+OpenWithCodeContextMenu=Abrir con %1
@@ -5,4 +5,5 @@ AssociateWithFiles=Inscrire %1 en tant qu'
 AddToPath=Ajouter à PATH (disponible après le redémarrage)
 RunAfter=Exécuter %1 après l'installation
 Other=Autre :
 SourceFile=Fichier source %1
+OpenWithCodeContextMenu=Ouvrir avec %1
@@ -5,4 +5,5 @@ AssociateWithFiles=%1 regisztr
 AddToPath=Hozzáadás a PATH-hoz (újraindítás után lesz elérhető)
 RunAfter=%1 indítása a telepítés után
 Other=Egyéb:
 SourceFile=%1 forrásfájl
+OpenWithCodeContextMenu=Megnyitás a következővel: %1
@@ -5,4 +5,5 @@ AssociateWithFiles=Registra %1 come editor per i tipi di file supportati
 AddToPath=Aggiungi a PATH (disponibile dopo il riavvio)
 RunAfter=Esegui %1 dopo l'installazione
 Other=Altro:
 SourceFile=File di origine %1
+OpenWithCodeContextMenu=Apri con %1
@@ -5,4 +5,5 @@ AssociateWithFiles=
 AddToPath=PATH への追加(再起動後に使用可能)
 RunAfter=インストール後に %1 を実行する
 Other=その他:
 SourceFile=%1 ソース ファイル
+OpenWithCodeContextMenu=%1 で開く
@@ -5,4 +5,5 @@ AssociateWithFiles=%1
 AddToPath=PATH에 추가(다시 시작한 후 사용 가능)
 RunAfter=설치 후 %1 실행
 Other=기타:
 SourceFile=%1 원본 파일
+OpenWithCodeContextMenu=%1(으)로 열기
@@ -5,4 +5,5 @@ AssociateWithFiles=Registre %1 como um editor para tipos de arquivos suportados
 AddToPath=Adicione em PATH (disponível após reiniciar)
 RunAfter=Executar %1 após a instalação
 Other=Outros:
 SourceFile=Arquivo Fonte %1
+OpenWithCodeContextMenu=Abrir com %1
@@ -5,4 +5,5 @@ AssociateWithFiles=
 AddToPath=Добавить в PATH (доступно после перезагрузки)
 RunAfter=Запустить %1 после установки
 Other=Другое:
 SourceFile=Исходный файл %1
+OpenWithCodeContextMenu=Открыть с помощью %1
@@ -5,4 +5,5 @@ AssociateWithFiles=%1 uygulamas
 AddToPath=PATH'e ekle (yeniden başlattıktan sonra kullanılabilir)
 RunAfter=Kurulumdan sonra %1 uygulamasını çalıştır.
 Other=Diğer:
 SourceFile=%1 Kaynak Dosyası
+OpenWithCodeContextMenu=%1 İle Aç
@@ -5,4 +5,5 @@ AssociateWithFiles=
 AddToPath=添加到 PATH (重启后生效)
 RunAfter=安装后运行 %1
 Other=其他:
 SourceFile=%1 源文件
+OpenWithCodeContextMenu=通过 %1 打开
Some files were not shown because too many files have changed in this diff.