mirror of
https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-17 11:01:37 -05:00
Compare commits
654 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f9b968c1ae | ||
|
|
e3f6feb135 | ||
|
|
7b0be2e773 | ||
|
|
9fec7edba7 | ||
|
|
27468f75a5 | ||
|
|
79ad848216 | ||
|
|
1a2c6f1578 | ||
|
|
17e4d3bfa3 | ||
|
|
2599fb1252 | ||
|
|
26d59b528e | ||
|
|
093f44a1e7 | ||
|
|
290f43dbd7 | ||
|
|
fb2486a54b | ||
|
|
7e02c16fd7 | ||
|
|
6638db1f35 | ||
|
|
f0dde491be | ||
|
|
994a2382ad | ||
|
|
856fec4243 | ||
|
|
d1c594cfd0 | ||
|
|
86da9852ca | ||
|
|
4ba6a979ba | ||
|
|
82974a2135 | ||
|
|
585c18ef4d | ||
|
|
7ec516d851 | ||
|
|
7e970d04ca | ||
|
|
808ce4366d | ||
|
|
8271226487 | ||
|
|
bbb7a67bd2 | ||
|
|
698b4fce41 | ||
|
|
302e8305ef | ||
|
|
659f392196 | ||
|
|
775a25d944 | ||
|
|
bceb766d28 | ||
|
|
36fd618ed4 | ||
|
|
99c473cdf6 | ||
|
|
88b55d0e06 | ||
|
|
184d4bbe27 | ||
|
|
4fc6f4a13e | ||
|
|
adad11c725 | ||
|
|
a1b5af0445 | ||
|
|
db4f512991 | ||
|
|
42ff30515c | ||
|
|
62565e0577 | ||
|
|
de177c0335 | ||
|
|
d614116b63 | ||
|
|
a7ff238653 | ||
|
|
6fb120f5dd | ||
|
|
6e8cc3aaca | ||
|
|
18ab73cc1d | ||
|
|
d1c7370f1c | ||
|
|
794f7a14c0 | ||
|
|
7cd2a6d6aa | ||
|
|
febf6b9e70 | ||
|
|
7201025a15 | ||
|
|
4787d7ba5c | ||
|
|
3de95af25c | ||
|
|
0bf4790a64 | ||
|
|
0d9353d99e | ||
|
|
a898c46e74 | ||
|
|
493e7087cf | ||
|
|
f5ce7fb2a5 | ||
|
|
a8818ab0df | ||
|
|
9691fab917 | ||
|
|
a7f5741608 | ||
|
|
c34869c243 | ||
|
|
ab94b9785e | ||
|
|
ee94524ab1 | ||
|
|
6cce532ca4 | ||
|
|
7f16a4d857 | ||
|
|
255ea1945b | ||
|
|
b38b53b658 | ||
|
|
82c60a23c0 | ||
|
|
c93ea20b75 | ||
|
|
6a4e4fc07b | ||
|
|
d358cdac1e | ||
|
|
4f8ced1f6b | ||
|
|
8cc60fde90 | ||
|
|
b8bc629970 | ||
|
|
9a83dfc022 | ||
|
|
d5e26527a6 | ||
|
|
c9226a07c5 | ||
|
|
d451528b36 | ||
|
|
48b2cbb0bf | ||
|
|
39e6b9933d | ||
|
|
d08fb1aee2 | ||
|
|
5235a1d029 | ||
|
|
3135b8525b | ||
|
|
0e9797c394 | ||
|
|
4145ecfb32 | ||
|
|
20e9b329b1 | ||
|
|
d1ccbf028f | ||
|
|
a1fc621e1b | ||
|
|
9ef6bec960 | ||
|
|
b631530753 | ||
|
|
d9997cebfc | ||
|
|
ffee69a765 | ||
|
|
7a38943412 | ||
|
|
c970173fc0 | ||
|
|
0979ce8de6 | ||
|
|
878bcc0d92 | ||
|
|
68b2f1a8e4 | ||
|
|
8cd06f74b9 | ||
|
|
43387f0d0b | ||
|
|
f3a6fc6f88 | ||
|
|
f2bc367e78 | ||
|
|
46a8410fc5 | ||
|
|
be7c26ede5 | ||
|
|
fb3b7be9e5 | ||
|
|
78731e0c8c | ||
|
|
6b67f27cac | ||
|
|
56182a53d1 | ||
|
|
66e1c01793 | ||
|
|
f0039a64a7 | ||
|
|
22501a09a1 | ||
|
|
c03cce7f60 | ||
|
|
98abf4a758 | ||
|
|
c19bc54877 | ||
|
|
e17d4e96ae | ||
|
|
b333788c3c | ||
|
|
632ca0685e | ||
|
|
52de2b4751 | ||
|
|
fc0c05c755 | ||
|
|
22b8ebd281 | ||
|
|
ec91d3eda0 | ||
|
|
7b31ee27d8 | ||
|
|
927120fa3b | ||
|
|
f1ca2a35ef | ||
|
|
1760af13d1 | ||
|
|
183cb84fbc | ||
|
|
019d5088ec | ||
|
|
15913e5e48 | ||
|
|
e6ffb97a7b | ||
|
|
8655044dfb | ||
|
|
f26c790736 | ||
|
|
7e553031ce | ||
|
|
164ec41fb1 | ||
|
|
8cd9097526 | ||
|
|
134b0b32c6 | ||
|
|
de4b7af1ad | ||
|
|
f976fc9418 | ||
|
|
55059907a3 | ||
|
|
8ca0082ec4 | ||
|
|
5b50696a1b | ||
|
|
840683e3f0 | ||
|
|
ee5dbdffb9 | ||
|
|
43f6a5576d | ||
|
|
8a44de27e7 | ||
|
|
fa9bbd4e1e | ||
|
|
66048f1d63 | ||
|
|
bafd9fd437 | ||
|
|
dae71c3bf4 | ||
|
|
ae8304fc33 | ||
|
|
d6ef42c8b0 | ||
|
|
131b0b93bf | ||
|
|
82185f75d7 | ||
|
|
3769b5066f | ||
|
|
ba8c331356 | ||
|
|
80248846bb | ||
|
|
cf5297958a | ||
|
|
22996cbce7 | ||
|
|
7563416754 | ||
|
|
02b1673c71 | ||
|
|
6ef87d7067 | ||
|
|
e3921c6d14 | ||
|
|
f9ef9d85f4 | ||
|
|
7a2c30e159 | ||
|
|
6438967202 | ||
|
|
63bf82ad84 | ||
|
|
18ab2ae799 | ||
|
|
86d6295bf0 | ||
|
|
3f306d2396 | ||
|
|
2f1f5b2376 | ||
|
|
62d7c71093 | ||
|
|
4f69ed5745 | ||
|
|
ddddf3beb4 | ||
|
|
0ae525cbd5 | ||
|
|
0520870754 | ||
|
|
8b17b77010 | ||
|
|
e2ef1f8a89 | ||
|
|
7f7052ad42 | ||
|
|
738ca479e4 | ||
|
|
34a274a7d1 | ||
|
|
b1496aa12f | ||
|
|
30acba7921 | ||
|
|
b5c0c37a23 | ||
|
|
ef0a92d83f | ||
|
|
b364e32beb | ||
|
|
7f51921176 | ||
|
|
efebd681b6 | ||
|
|
d635390b33 | ||
|
|
61f0d614ce | ||
|
|
27b80804f5 | ||
|
|
df0c505452 | ||
|
|
564f78b6f6 | ||
|
|
df6b6ded33 | ||
|
|
e801a04bcf | ||
|
|
3b1eaca58e | ||
|
|
22a427f934 | ||
|
|
4645a8ba6b | ||
|
|
1b88c10197 | ||
|
|
5a392dfd58 | ||
|
|
e49ff93122 | ||
|
|
399788ccc1 | ||
|
|
08fde8719d | ||
|
|
f7bef3f87b | ||
|
|
c70e7794eb | ||
|
|
9eb438bb24 | ||
|
|
38decaea90 | ||
|
|
ade68b184d | ||
|
|
3c702c15e2 | ||
|
|
76e8805a6b | ||
|
|
5dc7049f8c | ||
|
|
97f852c3d6 | ||
|
|
a8eed6114b | ||
|
|
6864d39f85 | ||
|
|
6f47c1fcda | ||
|
|
c2c64293f5 | ||
|
|
5bbc17be5c | ||
|
|
abbb1e54da | ||
|
|
08d81927b4 | ||
|
|
3d718068d1 | ||
|
|
1d31a6ef98 | ||
|
|
9c8f36e463 | ||
|
|
bd988f62a2 | ||
|
|
29a46b9f8b | ||
|
|
55acb36e33 | ||
|
|
a8b442a274 | ||
|
|
330f690628 | ||
|
|
7cc430d199 | ||
|
|
2558d6bff6 | ||
|
|
5aff7ef4c7 | ||
|
|
7569f7fa32 | ||
|
|
333f634e94 | ||
|
|
b62c0cf2ab | ||
|
|
572a83dac7 | ||
|
|
dbf834c00f | ||
|
|
24a6897836 | ||
|
|
23da6155dd | ||
|
|
ef9321ef2c | ||
|
|
836bf1d28a | ||
|
|
9eb319f392 | ||
|
|
79b6a14d64 | ||
|
|
0dec2ff9b5 | ||
|
|
dd270a78fc | ||
|
|
827e6162c7 | ||
|
|
9f54fbc8cc | ||
|
|
f8858a3511 | ||
|
|
82e5221024 | ||
|
|
004297aea6 | ||
|
|
f7b8a019cd | ||
|
|
a89788a020 | ||
|
|
62af81e88c | ||
|
|
ab736466cd | ||
|
|
6a6f30523c | ||
|
|
8e3fa0a26d | ||
|
|
72c088e137 | ||
|
|
ce5eb00177 | ||
|
|
5629356c66 | ||
|
|
6e7311ca87 | ||
|
|
d315ccff68 | ||
|
|
789ee4b133 | ||
|
|
428745e929 | ||
|
|
cea8d62051 | ||
|
|
fa79e5b016 | ||
|
|
15fd37e049 | ||
|
|
1dd4ea19a3 | ||
|
|
067af76904 | ||
|
|
a7f597c943 | ||
|
|
833adf3515 | ||
|
|
bd15a96b83 | ||
|
|
024bd00d93 | ||
|
|
eb1aafa639 | ||
|
|
bf6b68c614 | ||
|
|
a895f53029 | ||
|
|
78d3b9d555 | ||
|
|
42e1b28130 | ||
|
|
5590d60c5a | ||
|
|
d79423c728 | ||
|
|
d1a0ae43c8 | ||
|
|
e4b0371b2a | ||
|
|
e191556d3b | ||
|
|
5a269fc49a | ||
|
|
613cd58aa3 | ||
|
|
af9984f73b | ||
|
|
3b1c9e910d | ||
|
|
5b29aef5f3 | ||
|
|
b65a7795df | ||
|
|
c6a78456b8 | ||
|
|
f8067ffada | ||
|
|
f7059a2365 | ||
|
|
d013b594b1 | ||
|
|
c5d427ebb1 | ||
|
|
240b90610f | ||
|
|
5cfad825fc | ||
|
|
8918d1593c | ||
|
|
b1e0b7c1e3 | ||
|
|
3a5e4cbeac | ||
|
|
7bfa6e611e | ||
|
|
004c177f7b | ||
|
|
86cc3f77ee | ||
|
|
a33820ecdd | ||
|
|
7babd6f3d0 | ||
|
|
e28ecf44cb | ||
|
|
93685d3a09 | ||
|
|
d660405e73 | ||
|
|
684fb2566b | ||
|
|
696f6841cb | ||
|
|
cef60f3ae5 | ||
|
|
34fe2b44cc | ||
|
|
a16bfbfedd | ||
|
|
fb4fccf2d5 | ||
|
|
b53cad78bd | ||
|
|
a431ca7ef2 | ||
|
|
c2022cac57 | ||
|
|
806d807eae | ||
|
|
24e3b1c5e6 | ||
|
|
bbe3605317 | ||
|
|
06d67f5ad2 | ||
|
|
41f9f22e38 | ||
|
|
a94cbb528e | ||
|
|
4a68ab4659 | ||
|
|
4c24043cc8 | ||
|
|
2ca5d18855 | ||
|
|
37426b0794 | ||
|
|
a70ebeed1c | ||
|
|
8f2113e6b5 | ||
|
|
03cb0565d4 | ||
|
|
dd14f9b93d | ||
|
|
4dd15fb479 | ||
|
|
397f6afaf1 | ||
|
|
65fb77ef5c | ||
|
|
1e22f47304 | ||
|
|
7c9be74970 | ||
|
|
5efb2cf918 | ||
|
|
98505110a4 | ||
|
|
1a864584b6 | ||
|
|
8aa8dc29a1 | ||
|
|
06e86e57e7 | ||
|
|
6a375fdd8c | ||
|
|
b8ad7e3072 | ||
|
|
597a0cad6b | ||
|
|
a646af2ad2 | ||
|
|
143b70c6a8 | ||
|
|
c1e95a2246 | ||
|
|
53a081262d | ||
|
|
8b46143d48 | ||
|
|
a05edc619c | ||
|
|
6c5aa6b367 | ||
|
|
0246c3f895 | ||
|
|
e3ae5263c6 | ||
|
|
0bfb1aab7e | ||
|
|
d120102805 | ||
|
|
7e5b864299 | ||
|
|
4dd6db57ee | ||
|
|
a2f105a913 | ||
|
|
ab31a7b964 | ||
|
|
203ff3872f | ||
|
|
2ee3840650 | ||
|
|
fa80dbfb27 | ||
|
|
2237d286b6 | ||
|
|
4124f6b1ad | ||
|
|
23361d3d56 | ||
|
|
4c8f3ddfd3 | ||
|
|
f631a8aa9a | ||
|
|
bbc6460d3f | ||
|
|
09d78544cf | ||
|
|
a791aff0a2 | ||
|
|
ce318f123f | ||
|
|
9374056e61 | ||
|
|
5ef19affd0 | ||
|
|
d40abf4add | ||
|
|
86f8b3f9ec | ||
|
|
385d7f2803 | ||
|
|
2ee04e0cf0 | ||
|
|
9bdaba3b65 | ||
|
|
80d46fb8a4 | ||
|
|
268f9ef725 | ||
|
|
3813d9385b | ||
|
|
48bf72bfc4 | ||
|
|
fa1d5cc49d | ||
|
|
de5fd11155 | ||
|
|
cd30a8cbc0 | ||
|
|
ec1e54db9a | ||
|
|
82963ad075 | ||
|
|
b24671bbf6 | ||
|
|
2ab7a47353 | ||
|
|
5d4da455bd | ||
|
|
0b039830ea | ||
|
|
77e1ca59ed | ||
|
|
e8e8ee5941 | ||
|
|
23861bd369 | ||
|
|
4c946b21a9 | ||
|
|
e74538b40d | ||
|
|
b6ef5469de | ||
|
|
ee2850f2e2 | ||
|
|
3387a762c4 | ||
|
|
75388cc3af | ||
|
|
087f7fc43d | ||
|
|
5da0e16e44 | ||
|
|
eb465fde1a | ||
|
|
26ece1ee86 | ||
|
|
f18b65a690 | ||
|
|
6851b2091f | ||
|
|
74396c1558 | ||
|
|
d02c680dab | ||
|
|
888327f5bc | ||
|
|
2623e7da88 | ||
|
|
0d2a3bc2d7 | ||
|
|
5eeaa5710c | ||
|
|
92e1f83046 | ||
|
|
9a3f72591e | ||
|
|
12d824d791 | ||
|
|
613fef5e73 | ||
|
|
1d4babefba | ||
|
|
6e9e81e3a1 | ||
|
|
c292561eb1 | ||
|
|
248464191d | ||
|
|
f1cdfb768d | ||
|
|
777c188a3f | ||
|
|
bf00a6b695 | ||
|
|
b58927fea1 | ||
|
|
2aa7a145d4 | ||
|
|
4d618c5ef1 | ||
|
|
543e3e2c09 | ||
|
|
93c9426f25 | ||
|
|
d4feb903b0 | ||
|
|
5a1183a457 | ||
|
|
22774f28c0 | ||
|
|
1936e0dbbd | ||
|
|
df6e86554c | ||
|
|
33218bb0e5 | ||
|
|
f475c04ce3 | ||
|
|
0788796f1a | ||
|
|
3a01f960a7 | ||
|
|
d37105ada2 | ||
|
|
66fda57513 | ||
|
|
2fe82e4b2f | ||
|
|
eee7e52bd4 | ||
|
|
ecd76eb870 | ||
|
|
bcaa09e910 | ||
|
|
32df727ff9 | ||
|
|
e1bfe6cdda | ||
|
|
e4e71af597 | ||
|
|
749ddc30c7 | ||
|
|
c4965c7fe9 | ||
|
|
de72ab176c | ||
|
|
b453c3a48e | ||
|
|
857c658888 | ||
|
|
d91488da62 | ||
|
|
16fbd4abfd | ||
|
|
285f8bc28c | ||
|
|
3fb4877859 | ||
|
|
e25cbdf4b9 | ||
|
|
b1db9a8cf1 | ||
|
|
f418104b7a | ||
|
|
5454917569 | ||
|
|
effa50a9bd | ||
|
|
ac87346507 | ||
|
|
cacbcb5415 | ||
|
|
0b2a2ad0ed | ||
|
|
44bc7a89df | ||
|
|
bcb5384639 | ||
|
|
b23e577ccc | ||
|
|
96a28f2c4d | ||
|
|
826c4115a7 | ||
|
|
c95ea16a44 | ||
|
|
8e1a2248e4 | ||
|
|
6b29fd05bd | ||
|
|
080d9bbaa6 | ||
|
|
263d342a79 | ||
|
|
0d2dcb3d25 | ||
|
|
9dd35c8c0d | ||
|
|
e85f93abec | ||
|
|
18c12dac9a | ||
|
|
a0f1d68cfb | ||
|
|
37f651fe08 | ||
|
|
d2e4e94aec | ||
|
|
cf1a09aeaf | ||
|
|
32897d3e07 | ||
|
|
3f2a728ed0 | ||
|
|
af24a9d002 | ||
|
|
6582debd73 | ||
|
|
575d1c8543 | ||
|
|
08b78c3ca5 | ||
|
|
e0a867a184 | ||
|
|
2b8508574d | ||
|
|
b8976785fd | ||
|
|
79e2c56ec8 | ||
|
|
1ea09c7add | ||
|
|
7489a65bbe | ||
|
|
faabdb8d88 | ||
|
|
57c5d98bdc | ||
|
|
b5c249c25d | ||
|
|
60a244888d | ||
|
|
c4dfc5cf70 | ||
|
|
4c2ffdfc68 | ||
|
|
704c5174f9 | ||
|
|
b708b4a42b | ||
|
|
ee98ce5c18 | ||
|
|
7162272f1e | ||
|
|
faee6b45e0 | ||
|
|
aef69ab12a | ||
|
|
a712426185 | ||
|
|
19be0d0ff3 | ||
|
|
3202e46930 | ||
|
|
5b95d6777f | ||
|
|
084524cd2d | ||
|
|
6ab03053a0 | ||
|
|
9f065b2b5a | ||
|
|
5327ed84c1 | ||
|
|
4018a29a16 | ||
|
|
7cbc268c52 | ||
|
|
bca7c8e6bd | ||
|
|
d0fb6de390 | ||
|
|
63f3d9862f | ||
|
|
00f8dcb23e | ||
|
|
bc4165037c | ||
|
|
07109617b5 | ||
|
|
6385443a4c | ||
|
|
6ef415d0e6 | ||
|
|
ba8ba9f68d | ||
|
|
b30252021b | ||
|
|
db57171ece | ||
|
|
3688e9981d | ||
|
|
b4de26a801 | ||
|
|
2a15768a25 | ||
|
|
f971417746 | ||
|
|
33854d42e4 | ||
|
|
2d9f6dcd86 | ||
|
|
e3c347e148 | ||
|
|
f7c468d6f0 | ||
|
|
e6cac8cc14 | ||
|
|
79d0239362 | ||
|
|
a0e31fc723 | ||
|
|
6a6048d40f | ||
|
|
a29ae4d3b9 | ||
|
|
82b19614e1 | ||
|
|
49851daf0d | ||
|
|
d815ae0e83 | ||
|
|
cb50fae12d | ||
|
|
26072af82f | ||
|
|
89c1c4897a | ||
|
|
5e5563f974 | ||
|
|
5df68e5942 | ||
|
|
d895de0bc1 | ||
|
|
2ec4a0c8a8 | ||
|
|
5e3ec6ea39 | ||
|
|
6f06ab440a | ||
|
|
c3bb7a66e0 | ||
|
|
aadc871124 | ||
|
|
cb2cea4ebd | ||
|
|
6125e68c1f | ||
|
|
71b80e0817 | ||
|
|
ac6a4e590d | ||
|
|
1f61a2581c | ||
|
|
bf23a52ba4 | ||
|
|
db498db0a8 | ||
|
|
9d3d64eef3 | ||
|
|
e694e0273b | ||
|
|
ced882a2e5 | ||
|
|
754c643b1b | ||
|
|
6a136854b0 | ||
|
|
a584aca969 | ||
|
|
b3fbb29bf2 | ||
|
|
b7299e5eec | ||
|
|
510c45b9b7 | ||
|
|
aad9c0f965 | ||
|
|
373828d76f | ||
|
|
86a9a2c069 | ||
|
|
6e7e6ee434 | ||
|
|
6af544afde | ||
|
|
dca21bd3be | ||
|
|
9b82b101cd | ||
|
|
47a14bbbff | ||
|
|
094d6f2339 | ||
|
|
290dd9531f | ||
|
|
141226332c | ||
|
|
7e0a5205b2 | ||
|
|
c6c863cd84 | ||
|
|
28d453fced | ||
|
|
4d62983680 | ||
|
|
d3ea9c3168 | ||
|
|
603a79d094 | ||
|
|
16481927e8 | ||
|
|
6f06c18014 | ||
|
|
7868afb4fd | ||
|
|
66d4d5c73f | ||
|
|
075479274d | ||
|
|
bae797f975 | ||
|
|
ea0f9e6ce9 | ||
|
|
fa6c52699e | ||
|
|
209d7e48d8 | ||
|
|
0bd3e1b0e1 | ||
|
|
9229b26b9e | ||
|
|
29dbce079b | ||
|
|
86df538db9 | ||
|
|
d9c5b7ea9e | ||
|
|
c9128d56c0 | ||
|
|
888755e842 | ||
|
|
3ac096b3b1 | ||
|
|
7ebd1eb053 | ||
|
|
2128851bdf | ||
|
|
823d136a00 | ||
|
|
a67e62b2d0 | ||
|
|
d262ea21e3 | ||
|
|
aaf115a5c8 | ||
|
|
206c5146e1 | ||
|
|
abe917f3c1 | ||
|
|
0793e11b04 | ||
|
|
9df66deb81 | ||
|
|
9765b0ed8e | ||
|
|
83c9c3f618 | ||
|
|
dd5dd12ee6 | ||
|
|
b68fd91a02 | ||
|
|
4270547147 | ||
|
|
c4b90360a5 | ||
|
|
7d49e75e46 | ||
|
|
8db5bd438e | ||
|
|
f6f18b68b5 | ||
|
|
ab8a9509b8 | ||
|
|
4dda5ee549 | ||
|
|
5ae8017233 | ||
|
|
66cdbbb335 | ||
|
|
3e9b694e6f | ||
|
|
b1eef13bb0 | ||
|
|
82f93f7da5 | ||
|
|
119008d05d | ||
|
|
aeaac4bc17 | ||
|
|
789e26ae60 | ||
|
|
b813ace79c | ||
|
|
02f497712d | ||
|
|
fda4ba81c3 | ||
|
|
e7a9d34ecd | ||
|
|
485cb43a34 | ||
|
|
3281d28de7 | ||
|
|
856833dbc4 | ||
|
|
15f1945f31 | ||
|
|
b1f29a8c92 | ||
|
|
cbbd4ffbb6 | ||
|
|
e2ea397fb9 | ||
|
|
6a4c9b4108 | ||
|
|
a61c86bff5 | ||
|
|
35b09542e2 | ||
|
|
a57536be4b | ||
|
|
fbb2accacb | ||
|
|
0a393400b2 | ||
|
|
8b5ce753e4 | ||
|
|
f05b9396e8 | ||
|
|
6670fe8c1c | ||
|
|
b8518f5795 | ||
|
|
c94291af52 | ||
|
|
1d6f48806e | ||
|
|
4626f37671 | ||
|
|
33a7fe38e1 |
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -2,7 +2,7 @@
|
|||||||
name: Bug report
|
name: Bug report
|
||||||
about: Create a report to help us improve
|
about: Create a report to help us improve
|
||||||
title: ''
|
title: ''
|
||||||
labels: Bug
|
labels: ''
|
||||||
assignees: ''
|
assignees: ''
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
1
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
1
.github/ISSUE_TEMPLATE/config.yml
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
blank_issues_enabled: false
|
||||||
2
.github/ISSUE_TEMPLATE/feature_request.md
vendored
2
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -2,7 +2,7 @@
|
|||||||
name: Feature request
|
name: Feature request
|
||||||
about: Suggest an idea for this project
|
about: Suggest an idea for this project
|
||||||
title: ''
|
title: ''
|
||||||
labels: Enhancement
|
labels: ''
|
||||||
assignees: ''
|
assignees: ''
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|||||||
73
.github/classifier.yml
vendored
73
.github/classifier.yml
vendored
@@ -1,49 +1,36 @@
|
|||||||
{
|
{
|
||||||
perform: false,
|
perform: true,
|
||||||
alwaysRequireAssignee: false,
|
alwaysRequireAssignee: false,
|
||||||
labelsRequiringAssignee: [],
|
labelsRequiringAssignee: [],
|
||||||
|
defaultLabel: 'Triage: Needed',
|
||||||
|
defaultAssignee: '',
|
||||||
autoAssignees: {
|
autoAssignees: {
|
||||||
accessibility: [],
|
Area - Acquisition: [],
|
||||||
acquisition: [],
|
Area - Azure: [],
|
||||||
agent: [],
|
Area - Backup\Restore: [],
|
||||||
azure: [],
|
Area - Charting\Insights: [],
|
||||||
backup: [],
|
Area - Connection: [ charles-gagnon ],
|
||||||
bcdr: [],
|
Area - DacFX: [],
|
||||||
'chart viewer': [],
|
Area - Dashboard: [],
|
||||||
connection: [],
|
Area - Data Explorer: [],
|
||||||
dacfx: [],
|
Area - Edit Data: [],
|
||||||
dashboard: [],
|
Area - Extensibility: [],
|
||||||
'data explorer': [],
|
Area - External Table: [],
|
||||||
documentation: [],
|
Area - Fundamentals: [],
|
||||||
'edit data': [],
|
Area - Language Service: [ charles-gagnon ],
|
||||||
export: [],
|
Area - Localization: [],
|
||||||
extensibility: [],
|
Area - Notebooks: [ chlafreniere ],
|
||||||
extensionManager: [],
|
Area - Performance: [],
|
||||||
globalization: [],
|
Area - Query Editor: [ anthonydresser ],
|
||||||
grid: [],
|
Area - Query Plan: [],
|
||||||
import: [],
|
Area - Reliability: [],
|
||||||
insights: [],
|
Area - Resource Deployment: [],
|
||||||
intellisense: [],
|
Area - Schema Compare: [],
|
||||||
localization: [],
|
Area - Shell: [],
|
||||||
'managed instance': [],
|
Area - SQL Agent: [],
|
||||||
notebooks: [],
|
Area - SQL Import: [],
|
||||||
'object explorer': [],
|
Area - SQL Profiler: [],
|
||||||
performance: [],
|
Area - SQL 2019: [],
|
||||||
profiler: [],
|
Area - SSMS Integration: []
|
||||||
'query editor': [],
|
|
||||||
'query execution': [],
|
|
||||||
reliability: [],
|
|
||||||
restore: [],
|
|
||||||
scripting: [],
|
|
||||||
'server group': [],
|
|
||||||
settings: [],
|
|
||||||
setup: [],
|
|
||||||
shell: [],
|
|
||||||
showplan: [],
|
|
||||||
snippet: [],
|
|
||||||
sql2019Preview: [],
|
|
||||||
sqldw: [],
|
|
||||||
supportability: [],
|
|
||||||
ux: []
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
9
.github/pull_request_template.md
vendored
Normal file
9
.github/pull_request_template.md
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
<!-- Thank you for submitting a Pull Request. Please:
|
||||||
|
* Read our Pull Request guidelines:
|
||||||
|
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
|
||||||
|
* Associate an issue with the Pull Request.
|
||||||
|
* Ensure that the code is up-to-date with the `master` branch.
|
||||||
|
* Include a description of the proposed changes and how to test them.
|
||||||
|
-->
|
||||||
|
|
||||||
|
This PR fixes #
|
||||||
6
.github/stale.yml
vendored
Normal file
6
.github/stale.yml
vendored
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
perform: true,
|
||||||
|
label: 'Stale PR',
|
||||||
|
daysSinceLastUpdate: 7,
|
||||||
|
ignoredLabels: ['Do Not Merge']
|
||||||
|
}
|
||||||
118
.github/workflows/ci.yml
vendored
Normal file
118
.github/workflows/ci.yml
vendored
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- release/*
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
- release/*
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
linux:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
CHILD_CONCURRENCY: "1"
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v1
|
||||||
|
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
|
||||||
|
- run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
|
||||||
|
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||||
|
sudo chmod +x /etc/init.d/xvfb
|
||||||
|
sudo update-rc.d xvfb defaults
|
||||||
|
sudo service xvfb start
|
||||||
|
name: Setup Build Environment
|
||||||
|
- uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 10
|
||||||
|
# TODO: cache node modules
|
||||||
|
- run: yarn --frozen-lockfile
|
||||||
|
name: Install Dependencies
|
||||||
|
- run: yarn electron x64
|
||||||
|
name: Download Electron
|
||||||
|
- run: yarn gulp hygiene --skip-tslint
|
||||||
|
name: Run Hygiene Checks
|
||||||
|
- run: yarn gulp tslint
|
||||||
|
name: Run TSLint Checks
|
||||||
|
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
|
||||||
|
name: Run Strict Null Check
|
||||||
|
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Run Monaco Editor Checks
|
||||||
|
- run: yarn compile
|
||||||
|
name: Compile Sources
|
||||||
|
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Download Built-in Extensions
|
||||||
|
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
||||||
|
name: Run Unit Tests
|
||||||
|
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Run Integration Tests
|
||||||
|
|
||||||
|
windows:
|
||||||
|
runs-on: windows-2016
|
||||||
|
env:
|
||||||
|
CHILD_CONCURRENCY: "1"
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v1
|
||||||
|
- uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 10
|
||||||
|
- uses: actions/setup-python@v1
|
||||||
|
with:
|
||||||
|
python-version: '2.x'
|
||||||
|
- run: yarn --frozen-lockfile
|
||||||
|
name: Install Dependencies
|
||||||
|
- run: yarn electron
|
||||||
|
name: Download Electron
|
||||||
|
- run: yarn gulp hygiene --skip-tslint
|
||||||
|
name: Run Hygiene Checks
|
||||||
|
- run: yarn gulp tslint
|
||||||
|
name: Run TSLint Checks
|
||||||
|
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
|
||||||
|
name: Run Strict Null Check
|
||||||
|
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Run Monaco Editor Checks
|
||||||
|
- run: yarn compile
|
||||||
|
name: Compile Sources
|
||||||
|
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Download Built-in Extensions
|
||||||
|
- run: .\scripts\test.bat --tfs "Unit Tests"
|
||||||
|
name: Run Unit Tests
|
||||||
|
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Run Integration Tests
|
||||||
|
|
||||||
|
darwin:
|
||||||
|
runs-on: macos-latest
|
||||||
|
env:
|
||||||
|
CHILD_CONCURRENCY: "1"
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v1
|
||||||
|
- uses: actions/setup-node@v1
|
||||||
|
with:
|
||||||
|
node-version: 10
|
||||||
|
- run: yarn --frozen-lockfile
|
||||||
|
name: Install Dependencies
|
||||||
|
- run: yarn electron x64
|
||||||
|
name: Download Electron
|
||||||
|
- run: yarn gulp hygiene --skip-tslint
|
||||||
|
name: Run Hygiene Checks
|
||||||
|
- run: yarn gulp tslint
|
||||||
|
name: Run TSLint Checks
|
||||||
|
- run: yarn strict-null-check # {{SQL CARBON EDIT}} add step
|
||||||
|
name: Run Strict Null Check
|
||||||
|
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Run Monaco Editor Checks
|
||||||
|
- run: yarn compile
|
||||||
|
name: Compile Sources
|
||||||
|
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Download Built-in Extensions
|
||||||
|
- run: ./scripts/test.sh --tfs "Unit Tests"
|
||||||
|
name: Run Unit Tests
|
||||||
|
# - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
|
||||||
|
# name: Run Integration Tests
|
||||||
13
.github/workflows/tslint.yml
vendored
Normal file
13
.github/workflows/tslint.yml
vendored
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
name: TSLint Enforcement
|
||||||
|
on: [pull_request]
|
||||||
|
jobs:
|
||||||
|
job:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
timeout-minutes: 5
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v1
|
||||||
|
- name: TSLint
|
||||||
|
uses: aaomidi/gh-action-tslint@master
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
tslint_config: 'tslint-sql.json'
|
||||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -30,3 +30,4 @@ coverage/
|
|||||||
test_data/
|
test_data/
|
||||||
test-results/
|
test-results/
|
||||||
yarn-error.log
|
yarn-error.log
|
||||||
|
*.vsix
|
||||||
|
|||||||
33
.lgtm/javascript-queries/promises.ql
Normal file
33
.lgtm/javascript-queries/promises.ql
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
/**
|
||||||
|
* @name No floating promises
|
||||||
|
* @kind problem
|
||||||
|
* @problem.severity error
|
||||||
|
* @id js/experimental/floating-promise
|
||||||
|
*/
|
||||||
|
import javascript
|
||||||
|
|
||||||
|
private predicate isEscapingPromise(PromiseDefinition promise) {
|
||||||
|
exists (DataFlow::Node escape | promise.flowsTo(escape) |
|
||||||
|
escape = any(DataFlow::InvokeNode invk).getAnArgument()
|
||||||
|
or
|
||||||
|
escape = any(DataFlow::FunctionNode fun).getAReturn()
|
||||||
|
or
|
||||||
|
escape = any(ThrowStmt t).getExpr().flow()
|
||||||
|
or
|
||||||
|
escape = any(GlobalVariable v).getAnAssignedExpr().flow()
|
||||||
|
or
|
||||||
|
escape = any(DataFlow::PropWrite write).getRhs()
|
||||||
|
or
|
||||||
|
exists(WithStmt with, Assignment assign |
|
||||||
|
with.mayAffect(assign.getLhs()) and
|
||||||
|
assign.getRhs().flow() = escape
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
from PromiseDefinition promise
|
||||||
|
where
|
||||||
|
not exists(promise.getAMethodCall(any(string m | m = "then" or m = "catch" or m = "finally"))) and
|
||||||
|
not exists (AwaitExpr e | promise.flowsTo(e.getOperand().flow())) and
|
||||||
|
not isEscapingPromise(promise)
|
||||||
|
select promise, "This promise appears to be a floating promise"
|
||||||
6
.prettierrc.json
Normal file
6
.prettierrc.json
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
{
|
||||||
|
"useTabs": true,
|
||||||
|
"printWidth": 120,
|
||||||
|
"semi": true,
|
||||||
|
"singleQuote": true
|
||||||
|
}
|
||||||
88
.vscode/launch.json
vendored
88
.vscode/launch.json
vendored
@@ -16,6 +16,7 @@
|
|||||||
"request": "attach",
|
"request": "attach",
|
||||||
"name": "Attach to Extension Host",
|
"name": "Attach to Extension Host",
|
||||||
"port": 5870,
|
"port": 5870,
|
||||||
|
"timeout": 30000,
|
||||||
"restart": true,
|
"restart": true,
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
"${workspaceFolder}/out/**/*.js"
|
"${workspaceFolder}/out/**/*.js"
|
||||||
@@ -66,17 +67,16 @@
|
|||||||
"request": "launch",
|
"request": "launch",
|
||||||
"name": "Launch azuredatastudio",
|
"name": "Launch azuredatastudio",
|
||||||
"windows": {
|
"windows": {
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
|
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
|
||||||
"timeout": 20000
|
|
||||||
},
|
},
|
||||||
"osx": {
|
"osx": {
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||||
"timeout": 20000
|
|
||||||
},
|
},
|
||||||
"linux": {
|
"linux": {
|
||||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||||
"timeout": 20000
|
|
||||||
},
|
},
|
||||||
|
"port": 9222,
|
||||||
|
"timeout": 20000,
|
||||||
"env": {
|
"env": {
|
||||||
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
|
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
|
||||||
},
|
},
|
||||||
@@ -127,6 +127,33 @@
|
|||||||
"webRoot": "${workspaceFolder}",
|
"webRoot": "${workspaceFolder}",
|
||||||
"timeout": 45000
|
"timeout": 45000
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"type": "chrome",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Launch ADS (Web) (TBD)",
|
||||||
|
"runtimeExecutable": "yarn",
|
||||||
|
"runtimeArgs": [
|
||||||
|
"web"
|
||||||
|
],
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "chrome",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Launch ADS (Web, Chrome) (TBD)",
|
||||||
|
"url": "http://localhost:8080",
|
||||||
|
"preLaunchTask": "Run web"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"type": "node",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Git Unit Tests",
|
||||||
|
"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
|
||||||
|
"stopOnEntry": false,
|
||||||
|
"cwd": "${workspaceFolder}/extensions/git",
|
||||||
|
"outFiles": [
|
||||||
|
"${workspaceFolder}/extensions/git/out/**/*.js"
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "Launch Built-in Extension",
|
"name": "Launch Built-in Extension",
|
||||||
"type": "extensionHost",
|
"type": "extensionHost",
|
||||||
@@ -165,7 +192,10 @@
|
|||||||
"cwd": "${workspaceFolder}",
|
"cwd": "${workspaceFolder}",
|
||||||
"outFiles": [
|
"outFiles": [
|
||||||
"${workspaceFolder}/out/**/*.js"
|
"${workspaceFolder}/out/**/*.js"
|
||||||
]
|
],
|
||||||
|
"env": {
|
||||||
|
"MOCHA_COLORS": "true"
|
||||||
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "chrome",
|
"type": "chrome",
|
||||||
@@ -183,6 +213,22 @@
|
|||||||
"webRoot": "${workspaceFolder}",
|
"webRoot": "${workspaceFolder}",
|
||||||
"timeout": 45000
|
"timeout": 45000
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"type": "chrome",
|
||||||
|
"request": "launch",
|
||||||
|
"name": "Run Extension Integration Tests",
|
||||||
|
"windows": {
|
||||||
|
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.bat"
|
||||||
|
},
|
||||||
|
"osx": {
|
||||||
|
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
|
||||||
|
},
|
||||||
|
"linux": {
|
||||||
|
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
|
||||||
|
},
|
||||||
|
"webRoot": "${workspaceFolder}",
|
||||||
|
"timeout": 45000
|
||||||
|
},
|
||||||
],
|
],
|
||||||
"compounds": [
|
"compounds": [
|
||||||
{
|
{
|
||||||
@@ -199,6 +245,13 @@
|
|||||||
"Run Extension Unit Tests"
|
"Run Extension Unit Tests"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug Extension Integration Tests",
|
||||||
|
"configurations": [
|
||||||
|
"Attach to Extension Host",
|
||||||
|
"Run Extension Integration Tests"
|
||||||
|
]
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "Debug azuredatastudio Main and Renderer",
|
"name": "Debug azuredatastudio Main and Renderer",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
@@ -207,18 +260,33 @@
|
|||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Search and Renderer processes",
|
"name": "Debug azuredatastudio Main, Renderer & Extension Host",
|
||||||
|
"configurations": [
|
||||||
|
"Launch azuredatastudio",
|
||||||
|
"Attach to Main Process",
|
||||||
|
"Attach to Extension Host"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug Renderer and search processes",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
"Launch azuredatastudio",
|
"Launch azuredatastudio",
|
||||||
"Attach to Search Process"
|
"Attach to Search Process"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Renderer and Extension Host processes",
|
"name": "Debug Renderer and Extension Host processes",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
"Launch azuredatastudio",
|
"Launch azuredatastudio",
|
||||||
"Attach to Extension Host"
|
"Attach to Extension Host"
|
||||||
]
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Attach Renderer and Extension Host",
|
||||||
|
"configurations": [
|
||||||
|
"Attach to azuredatastudio",
|
||||||
|
"Attach to Extension Host"
|
||||||
|
]
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
4
.vscode/settings.json
vendored
4
.vscode/settings.json
vendored
@@ -39,6 +39,7 @@
|
|||||||
],
|
],
|
||||||
"typescript.tsdk": "node_modules/typescript/lib",
|
"typescript.tsdk": "node_modules/typescript/lib",
|
||||||
"npm.exclude": "**/extensions/**",
|
"npm.exclude": "**/extensions/**",
|
||||||
|
"npm.packageManager": "yarn",
|
||||||
"emmet.excludeLanguages": [],
|
"emmet.excludeLanguages": [],
|
||||||
"typescript.preferences.importModuleSpecifier": "non-relative",
|
"typescript.preferences.importModuleSpecifier": "non-relative",
|
||||||
"typescript.preferences.quoteStyle": "single",
|
"typescript.preferences.quoteStyle": "single",
|
||||||
@@ -60,5 +61,6 @@
|
|||||||
"remote.extensionKind": {
|
"remote.extensionKind": {
|
||||||
"msjsdiag.debugger-for-chrome": "workspace"
|
"msjsdiag.debugger-for-chrome": "workspace"
|
||||||
},
|
},
|
||||||
|
"gulp.autoDetect": "off",
|
||||||
"files.insertFinalNewline": true
|
"files.insertFinalNewline": true
|
||||||
}
|
}
|
||||||
|
|||||||
37
.vscode/tasks.json
vendored
37
.vscode/tasks.json
vendored
@@ -5,7 +5,10 @@
|
|||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "watch",
|
"script": "watch",
|
||||||
"label": "Build VS Code",
|
"label": "Build VS Code",
|
||||||
"group": "build",
|
"group": {
|
||||||
|
"kind": "build",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
"isBackground": true,
|
"isBackground": true,
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never"
|
||||||
@@ -30,22 +33,22 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "strict-initialization-watch",
|
"script": "strict-function-types-watch",
|
||||||
"label": "TS - Strict Initialization",
|
"label": "TS - Strict Function Types",
|
||||||
"isBackground": true,
|
"isBackground": true,
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never"
|
||||||
},
|
},
|
||||||
"problemMatcher": {
|
"problemMatcher": {
|
||||||
"base": "$tsc-watch",
|
"base": "$tsc-watch",
|
||||||
"owner": "typescript-strict-initialization",
|
"owner": "typescript-function-types",
|
||||||
"applyTo": "allDocuments"
|
"applyTo": "allDocuments"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "npm",
|
"type": "npm",
|
||||||
"script": "strict-null-check-watch",
|
"script": "strict-null-check-watch",
|
||||||
"label": "TS - Strict Null Cheks",
|
"label": "TS - Strict Null Checks",
|
||||||
"isBackground": true,
|
"isBackground": true,
|
||||||
"presentation": {
|
"presentation": {
|
||||||
"reveal": "never"
|
"reveal": "never"
|
||||||
@@ -87,8 +90,8 @@
|
|||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"type": "gulp",
|
"type": "npm",
|
||||||
"task": "electron",
|
"script": "electron",
|
||||||
"label": "Download electron"
|
"label": "Download electron"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -96,6 +99,24 @@
|
|||||||
"task": "hygiene",
|
"task": "hygiene",
|
||||||
"problemMatcher": []
|
"problemMatcher": []
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"type": "shell",
|
||||||
|
"command": "yarn web -- --no-launch",
|
||||||
|
"label": "Run web",
|
||||||
|
"isBackground": true,
|
||||||
|
// This section to make error go away when launching the debug config
|
||||||
|
"problemMatcher": {
|
||||||
|
"pattern": {
|
||||||
|
"regexp": ""
|
||||||
|
},
|
||||||
|
"background": {
|
||||||
|
"beginsPattern": ".*node .*",
|
||||||
|
"endsPattern": "Web UI available at .*"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"reveal": "never"
|
||||||
|
}
|
||||||
|
},
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
2
.yarnrc
2
.yarnrc
@@ -1,3 +1,3 @@
|
|||||||
disturl "https://atom.io/download/electron"
|
disturl "https://atom.io/download/electron"
|
||||||
target "4.2.9"
|
target "6.1.5"
|
||||||
runtime "electron"
|
runtime "electron"
|
||||||
|
|||||||
45
CHANGELOG.md
45
CHANGELOG.md
@@ -1,5 +1,48 @@
|
|||||||
# Change Log
|
# Change Log
|
||||||
|
|
||||||
|
## Version 1.13.1
|
||||||
|
* Release date: November 15, 2019
|
||||||
|
* Release status: General Availability
|
||||||
|
* Resolved [#8210 Copy/Paste results are out of order](https://github.com/microsoft/azuredatastudio/issues/8210).
|
||||||
|
|
||||||
|
## Version 1.13.0
|
||||||
|
* Release date: November 4, 2019
|
||||||
|
* Release status: General Availability
|
||||||
|
* General Availability release for Schema Compare and DACPAC extensions
|
||||||
|
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/43?closed=1).
|
||||||
|
|
||||||
|
## Contributions and "thank you"
|
||||||
|
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||||
|
* aspnerd for `Use selected DB for import wizard schema list` [#7878](https://github.com/microsoft/azuredatastudio/pull/7878)
|
||||||
|
|
||||||
|
## Version 1.12.2
|
||||||
|
* Release date: October 11, 2019
|
||||||
|
* Release status: General Availability
|
||||||
|
* Hotfix release (1.12.2): `Disable automatically starting the EH in inspect mode` https://github.com/microsoft/azuredatastudio/commit/c9bef82ace6c67190d0e83820011a2bbd1f793c1
|
||||||
|
|
||||||
|
## Version 1.12.1
|
||||||
|
* Release date: October 7, 2019
|
||||||
|
* Release status: General Availability
|
||||||
|
* Hotfix release: `Notebooks: Ensure quotes and backslashes are escaped properly in text editor model` https://github.com/microsoft/azuredatastudio/pull/7540
|
||||||
|
|
||||||
|
## Version 1.12.0
|
||||||
|
* Release date: October 2, 2019
|
||||||
|
* Release status: General Availability
|
||||||
|
|
||||||
|
## What's new in this version
|
||||||
|
* Announcing the Query History panel
|
||||||
|
* Improved Query Results Grid copy selection support
|
||||||
|
* TempDB page added to Server Reports extension
|
||||||
|
* PowerShell extension update
|
||||||
|
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/42?closed=1).
|
||||||
|
|
||||||
|
## Version 1.11.0
|
||||||
|
* Release date: September 10, 2019
|
||||||
|
* Release status: General Availability
|
||||||
|
|
||||||
|
## What's new in this version
|
||||||
|
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).
|
||||||
|
|
||||||
## Version 1.10.0
|
## Version 1.10.0
|
||||||
* Release date: August 14, 2019
|
* Release date: August 14, 2019
|
||||||
* Release status: General Availability
|
* Release status: General Availability
|
||||||
@@ -197,7 +240,7 @@ We would like to thank all our users who raised issues, and in particular the fo
|
|||||||
|
|
||||||
## What's new in this version
|
## What's new in this version
|
||||||
* Announcing the SQL Server 2019 Preview extension.
|
* Announcing the SQL Server 2019 Preview extension.
|
||||||
* Support for SQL Server 2019 preview features including big data cluster support.
|
* Support for SQL Server 2019 preview features including Big Data Cluster support.
|
||||||
* Azure Data Studio Notebooks
|
* Azure Data Studio Notebooks
|
||||||
* The Azure Resource Explorer viewlets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
|
* The Azure Resource Explorer viewlets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
|
||||||
* SQL Server Polybase Create External Table Wizard
|
* SQL Server Polybase Create External Table Wizard
|
||||||
|
|||||||
21
README.md
21
README.md
@@ -2,6 +2,7 @@
|
|||||||
|
|
||||||
[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||||
[](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
|
[](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
|
||||||
|
[](https://twitter.com/azuredatastudio)
|
||||||
|
|
||||||
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
|
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
|
||||||
|
|
||||||
@@ -9,13 +10,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se
|
|||||||
|
|
||||||
Platform | Link
|
Platform | Link
|
||||||
-- | --
|
-- | --
|
||||||
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2100710
|
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2109256
|
||||||
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2100711
|
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2109085
|
||||||
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2100712
|
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2109255
|
||||||
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2100809
|
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2109180
|
||||||
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2100714
|
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2109179
|
||||||
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2100810
|
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2109178
|
||||||
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2100672
|
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2109254
|
||||||
|
|
||||||
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
|
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
|
||||||
|
|
||||||
@@ -68,6 +69,12 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
|
|||||||
## Contributions and "Thank You"
|
## Contributions and "Thank You"
|
||||||
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
|
||||||
|
|
||||||
|
* eulercamposbarros for `Prevent connections from moving on click (#7528)`
|
||||||
|
* AlexFsmn for `Fixed issue where task icons got hidden if text was too long`
|
||||||
|
* jamesrod817 for `Tempdb (#7022)`
|
||||||
|
* dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
|
||||||
|
* devmattrick for `Update row count as updates are received #6642`
|
||||||
|
* mottykohn for `In Message panel onclick scroll to line #6417`
|
||||||
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
|
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
|
||||||
* yamatoya for `fix the format #4899`
|
* yamatoya for `fix the format #4899`
|
||||||
* GeoffYoung for `Fix sqlDropColumn description #4422`
|
* GeoffYoung for `Fix sqlDropColumn description #4422`
|
||||||
|
|||||||
@@ -36,6 +36,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
|||||||
jquery-ui: https://github.com/jquery/jquery-ui
|
jquery-ui: https://github.com/jquery/jquery-ui
|
||||||
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
|
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
|
||||||
jschardet: https://github.com/aadsm/jschardet
|
jschardet: https://github.com/aadsm/jschardet
|
||||||
|
jupyter-powershell: https://github.com/vors/jupyter-powershell
|
||||||
JupyterLab: https://github.com/jupyterlab/jupyterlab
|
JupyterLab: https://github.com/jupyterlab/jupyterlab
|
||||||
make-error: https://github.com/JsCommunity/make-error
|
make-error: https://github.com/JsCommunity/make-error
|
||||||
minimist: https://github.com/substack/minimist
|
minimist: https://github.com/substack/minimist
|
||||||
@@ -1175,7 +1176,35 @@ That's all there is to it!
|
|||||||
=========================================
|
=========================================
|
||||||
END OF jschardet NOTICES AND INFORMATION
|
END OF jschardet NOTICES AND INFORMATION
|
||||||
|
|
||||||
|
%% jupyter-powershell NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
|
The MIT License (MIT)
|
||||||
|
|
||||||
|
Copyright (c) 2016 Sergei Vorobev
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
||||||
|
|
||||||
|
=========================================
|
||||||
|
END OF jupyter-powershell NOTICES AND INFORMATION
|
||||||
|
|
||||||
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
|
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
|
||||||
|
=========================================
|
||||||
Copyright (c) 2015 Project Jupyter Contributors
|
Copyright (c) 2015 Project Jupyter Contributors
|
||||||
All rights reserved.
|
All rights reserved.
|
||||||
|
|
||||||
|
|||||||
@@ -1,84 +0,0 @@
|
|||||||
steps:
|
|
||||||
- script: |
|
|
||||||
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
|
|
||||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
|
||||||
sudo chmod +x /etc/init.d/xvfb
|
|
||||||
sudo update-rc.d xvfb defaults
|
|
||||||
sudo service xvfb start
|
|
||||||
sudo apt-get install -y libkrb5-dev
|
|
||||||
# sh -e /etc/init.d/xvfb start
|
|
||||||
# sleep 3
|
|
||||||
displayName: "Linux preinstall"
|
|
||||||
condition: eq(variables['Agent.OS'], 'Linux')
|
|
||||||
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
|
||||||
inputs:
|
|
||||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
|
||||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
|
||||||
vstsFeed: "$(build-cache)"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn --frozen-lockfile
|
|
||||||
displayName: Install Dependencies
|
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: $(GITHUB_TOKEN)
|
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
|
||||||
inputs:
|
|
||||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
|
||||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
|
||||||
vstsFeed: "$(build-cache)"
|
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp electron-x64
|
|
||||||
displayName: Download Electron
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: $(GITHUB_TOKEN)
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn tslint
|
|
||||||
displayName: "Run TSLint"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn strict-null-check
|
|
||||||
displayName: "Run Strict Null Check"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn compile
|
|
||||||
displayName: "Compile"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
|
|
||||||
displayName: "Tests"
|
|
||||||
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
|
|
||||||
displayName: "Tests"
|
|
||||||
condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))
|
|
||||||
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: "**/test-results.xml"
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
|
|
||||||
- task: PublishCodeCoverageResults@1
|
|
||||||
inputs:
|
|
||||||
codeCoverageTool: "cobertura"
|
|
||||||
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
|
|
||||||
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
|
|
||||||
condition: ne(variables['Agent.OS'], 'Linux')
|
|
||||||
@@ -1,65 +0,0 @@
|
|||||||
steps:
|
|
||||||
- task: NodeTool@0
|
|
||||||
inputs:
|
|
||||||
versionSpec: "10.15.1"
|
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
|
||||||
inputs:
|
|
||||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
|
||||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
|
||||||
vstsFeed: "$(build-cache)"
|
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn --frozen-lockfile
|
|
||||||
displayName: Install Dependencies
|
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: $(GITHUB_TOKEN)
|
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
|
||||||
inputs:
|
|
||||||
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
|
|
||||||
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
|
|
||||||
vstsFeed: "$(build-cache)"
|
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp electron-x64
|
|
||||||
displayName: "Electron"
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: $(GITHUB_TOKEN)
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn tslint
|
|
||||||
displayName: "Run TSLint"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn strict-null-check
|
|
||||||
displayName: "Run Strict Null Check"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
yarn compile
|
|
||||||
displayName: "Compile"
|
|
||||||
|
|
||||||
- script: |
|
|
||||||
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
|
|
||||||
displayName: "Test"
|
|
||||||
|
|
||||||
- task: PublishTestResults@2
|
|
||||||
inputs:
|
|
||||||
testResultsFiles: "test-results.xml"
|
|
||||||
condition: succeededOrFailed()
|
|
||||||
|
|
||||||
- task: PublishCodeCoverageResults@1
|
|
||||||
inputs:
|
|
||||||
codeCoverageTool: "cobertura"
|
|
||||||
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
|
|
||||||
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report
|
|
||||||
@@ -3,20 +3,20 @@ trigger:
|
|||||||
- release/*
|
- release/*
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
- job: Windows
|
- job: Windows
|
||||||
pool:
|
pool:
|
||||||
vmImage: VS2017-Win2016
|
vmImage: VS2017-Win2016
|
||||||
steps:
|
steps:
|
||||||
- template: azure-pipelines-windows.yml
|
- template: build/azure-pipelines/win32/continuous-build-win32.yml
|
||||||
|
|
||||||
- job: Linux
|
- job: Linux
|
||||||
pool:
|
pool:
|
||||||
vmImage: "Ubuntu-16.04"
|
vmImage: 'Ubuntu-16.04'
|
||||||
steps:
|
steps:
|
||||||
- template: azure-pipelines-linux-mac.yml
|
- template: build/azure-pipelines/linux/continuous-build-linux.yml
|
||||||
|
|
||||||
- job: macOS
|
- job: macOS
|
||||||
pool:
|
pool:
|
||||||
vmImage: macOS 10.13
|
vmImage: macOS 10.13
|
||||||
steps:
|
steps:
|
||||||
- template: azure-pipelines-linux-mac.yml
|
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml
|
||||||
|
|||||||
@@ -1 +1 @@
|
|||||||
2019-07-11T05:47:05.444Z
|
2019-12-01T02:20:58.491Z
|
||||||
|
|||||||
36
build/azure-pipelines/common/copyArtifacts.ts
Normal file
36
build/azure-pipelines/common/copyArtifacts.ts
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
import * as vfs from 'vinyl-fs';
|
||||||
|
|
||||||
|
const files = [
|
||||||
|
'.build/extensions/**/*.vsix', // external extensions
|
||||||
|
'.build/win32-x64/**/*.{exe,zip}', // windows binaries
|
||||||
|
'.build/linux/sha256hashes.txt', // linux hashes
|
||||||
|
'.build/linux/deb/amd64/deb/*', // linux debs
|
||||||
|
'.build/linux/rpm/x86_64/*', // linux rpms
|
||||||
|
'.build/linux/server/*', // linux server
|
||||||
|
'.build/linux/archive/*', // linux archive
|
||||||
|
'.build/docker/**', // docker images
|
||||||
|
'.build/darwin/**', // darwin binaries
|
||||||
|
'.build/version.json' // version information
|
||||||
|
];
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const stream = vfs.src(files, { base: '.build', allowEmpty: true })
|
||||||
|
.pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));
|
||||||
|
|
||||||
|
stream.on('end', () => resolve());
|
||||||
|
stream.on('error', e => reject(e));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch(err => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
132
build/azure-pipelines/common/createAsset.ts
Normal file
132
build/azure-pipelines/common/createAsset.ts
Normal file
@@ -0,0 +1,132 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import { Readable } from 'stream';
|
||||||
|
import * as crypto from 'crypto';
|
||||||
|
import * as azure from 'azure-storage';
|
||||||
|
import * as mime from 'mime';
|
||||||
|
import { CosmosClient } from '@azure/cosmos';
|
||||||
|
|
||||||
|
interface Asset {
|
||||||
|
platform: string;
|
||||||
|
type: string;
|
||||||
|
url: string;
|
||||||
|
mooncakeUrl?: string;
|
||||||
|
hash: string;
|
||||||
|
sha256hash: string;
|
||||||
|
size: number;
|
||||||
|
supportsFastUpdate?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (process.argv.length !== 6) {
|
||||||
|
console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
|
||||||
|
process.exit(-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
function hashStream(hashName: string, stream: Readable): Promise<string> {
|
||||||
|
return new Promise<string>((c, e) => {
|
||||||
|
const shasum = crypto.createHash(hashName);
|
||||||
|
|
||||||
|
stream
|
||||||
|
.on('data', shasum.update.bind(shasum))
|
||||||
|
.on('error', e)
|
||||||
|
.on('close', () => c(shasum.digest('hex')));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
|
||||||
|
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
|
||||||
|
return existsResult.exists;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
|
||||||
|
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||||
|
contentSettings: {
|
||||||
|
contentType: mime.lookup(file),
|
||||||
|
cacheControl: 'max-age=31536000, public'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||||
|
}
|
||||||
|
|
||||||
|
function getEnv(name: string): string {
|
||||||
|
const result = process.env[name];
|
||||||
|
|
||||||
|
if (typeof result === 'undefined') {
|
||||||
|
throw new Error('Missing env: ' + name);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main(): Promise<void> {
|
||||||
|
const [, , platform, type, name, file] = process.argv;
|
||||||
|
const quality = getEnv('VSCODE_QUALITY');
|
||||||
|
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||||
|
|
||||||
|
console.log('Creating asset...');
|
||||||
|
|
||||||
|
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
|
||||||
|
const size = stat.size;
|
||||||
|
|
||||||
|
console.log('Size:', size);
|
||||||
|
|
||||||
|
const stream = fs.createReadStream(file);
|
||||||
|
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
|
||||||
|
|
||||||
|
console.log('SHA1:', sha1hash);
|
||||||
|
console.log('SHA256:', sha256hash);
|
||||||
|
|
||||||
|
const blobName = commit + '/' + name;
|
||||||
|
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
|
||||||
|
|
||||||
|
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
|
||||||
|
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||||
|
|
||||||
|
const blobExists = await doesAssetExist(blobService, quality, blobName);
|
||||||
|
|
||||||
|
if (blobExists) {
|
||||||
|
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Uploading blobs to Azure storage...');
|
||||||
|
|
||||||
|
await uploadBlob(blobService, quality, blobName, file);
|
||||||
|
|
||||||
|
console.log('Blobs successfully uploaded.');
|
||||||
|
|
||||||
|
const asset: Asset = {
|
||||||
|
platform,
|
||||||
|
type,
|
||||||
|
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
|
||||||
|
hash: sha1hash,
|
||||||
|
sha256hash,
|
||||||
|
size
|
||||||
|
};
|
||||||
|
|
||||||
|
// Remove this if we ever need to rollback fast updates for windows
|
||||||
|
if (/win32/.test(platform)) {
|
||||||
|
asset.supportsFastUpdate = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Asset:', JSON.stringify(asset, null, ' '));
|
||||||
|
|
||||||
|
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||||
|
const scripts = client.database('builds').container(quality).scripts;
|
||||||
|
await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().then(() => {
|
||||||
|
console.log('Asset successfully created');
|
||||||
|
process.exit(0);
|
||||||
|
}, err => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
60
build/azure-pipelines/common/createBuild.ts
Normal file
60
build/azure-pipelines/common/createBuild.ts
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
import { CosmosClient } from '@azure/cosmos';
|
||||||
|
|
||||||
|
if (process.argv.length !== 3) {
|
||||||
|
console.error('Usage: node createBuild.js VERSION');
|
||||||
|
process.exit(-1);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getEnv(name: string): string {
|
||||||
|
const result = process.env[name];
|
||||||
|
|
||||||
|
if (typeof result === 'undefined') {
|
||||||
|
throw new Error('Missing env: ' + name);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main(): Promise<void> {
|
||||||
|
const [, , _version] = process.argv;
|
||||||
|
const quality = getEnv('VSCODE_QUALITY');
|
||||||
|
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||||
|
const queuedBy = getEnv('BUILD_QUEUEDBY');
|
||||||
|
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
|
||||||
|
const version = _version + (quality === 'stable' ? '' : `-${quality}`);
|
||||||
|
|
||||||
|
console.log('Creating build...');
|
||||||
|
console.log('Quality:', quality);
|
||||||
|
console.log('Version:', version);
|
||||||
|
console.log('Commit:', commit);
|
||||||
|
|
||||||
|
const build = {
|
||||||
|
id: commit,
|
||||||
|
timestamp: (new Date()).getTime(),
|
||||||
|
version,
|
||||||
|
isReleased: false,
|
||||||
|
sourceBranch,
|
||||||
|
queuedBy,
|
||||||
|
assets: [],
|
||||||
|
updates: {}
|
||||||
|
};
|
||||||
|
|
||||||
|
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||||
|
const scripts = client.database('builds').container(quality).scripts;
|
||||||
|
await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
|
||||||
|
}
|
||||||
|
|
||||||
|
main().then(() => {
|
||||||
|
console.log('Build successfully created');
|
||||||
|
process.exit(0);
|
||||||
|
}, err => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
9
build/azure-pipelines/common/publish-webview.sh
Executable file
9
build/azure-pipelines/common/publish-webview.sh
Executable file
@@ -0,0 +1,9 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -e
|
||||||
|
REPO="$(pwd)"
|
||||||
|
|
||||||
|
# Publish webview contents
|
||||||
|
PACKAGEJSON="$REPO/package.json"
|
||||||
|
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||||
|
|
||||||
|
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"
|
||||||
87
build/azure-pipelines/common/publish-webview.ts
Normal file
87
build/azure-pipelines/common/publish-webview.ts
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
import * as azure from 'azure-storage';
|
||||||
|
import * as mime from 'mime';
|
||||||
|
import * as minimist from 'minimist';
|
||||||
|
import { basename, join } from 'path';
|
||||||
|
|
||||||
|
const fileNames = [
|
||||||
|
'fake.html',
|
||||||
|
'host.js',
|
||||||
|
'index.html',
|
||||||
|
'main.js',
|
||||||
|
'service-worker.js'
|
||||||
|
];
|
||||||
|
|
||||||
|
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
|
||||||
|
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
|
||||||
|
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
|
||||||
|
return existsResult.exists;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
|
||||||
|
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||||
|
contentSettings: {
|
||||||
|
contentType: mime.lookup(file),
|
||||||
|
cacheControl: 'max-age=31536000, public'
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function publish(commit: string, files: readonly string[]): Promise<void> {
|
||||||
|
|
||||||
|
console.log('Publishing...');
|
||||||
|
console.log('Commit:', commit);
|
||||||
|
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
|
||||||
|
|
||||||
|
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
|
||||||
|
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||||
|
|
||||||
|
await assertContainer(blobService, commit);
|
||||||
|
|
||||||
|
for (const file of files) {
|
||||||
|
const blobName = basename(file);
|
||||||
|
const blobExists = await doesBlobExist(blobService, commit, blobName);
|
||||||
|
if (blobExists) {
|
||||||
|
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
console.log('Uploading blob to Azure storage...');
|
||||||
|
await uploadBlob(blobService, commit, blobName, file);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('Blobs successfully uploaded.');
|
||||||
|
}
|
||||||
|
|
||||||
|
function main(): void {
|
||||||
|
const commit = process.env['BUILD_SOURCEVERSION'];
|
||||||
|
|
||||||
|
if (!commit) {
|
||||||
|
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const opts = minimist(process.argv.slice(2));
|
||||||
|
const [directory] = opts._;
|
||||||
|
|
||||||
|
const files = fileNames.map(fileName => join(directory, fileName));
|
||||||
|
|
||||||
|
publish(commit, files).catch(err => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Require at least one command-line argument (the directory containing the
// webview files) before running.
if (process.argv.length < 3) {
	console.error('Usage: node publish.js <directory>');
	process.exit(-1);
}

main();
|
||||||
70
build/azure-pipelines/common/releaseBuild.ts
Normal file
70
build/azure-pipelines/common/releaseBuild.ts
Normal file
@@ -0,0 +1,70 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
import { CosmosClient } from '@azure/cosmos';
|
||||||
|
|
||||||
|
function getEnv(name: string): string {
|
||||||
|
const result = process.env[name];
|
||||||
|
|
||||||
|
if (typeof result === 'undefined') {
|
||||||
|
throw new Error('Missing env: ' + name);
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Per-quality release configuration document (queried from the `config`
// container of the `builds` database in getConfig()).
interface Config {
	id: string;      // the quality name this config applies to
	frozen: boolean; // when true, main() skips releasing builds for this quality
}
|
||||||
|
|
||||||
|
function createDefaultConfig(quality: string): Config {
|
||||||
|
return {
|
||||||
|
id: quality,
|
||||||
|
frozen: false
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async function getConfig(client: CosmosClient, quality: string): Promise<Config> {
|
||||||
|
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;
|
||||||
|
|
||||||
|
const res = await client.database('builds').container('config').items.query(query).fetchAll();
|
||||||
|
|
||||||
|
if (res.resources.length === 0) {
|
||||||
|
return createDefaultConfig(quality);
|
||||||
|
}
|
||||||
|
|
||||||
|
return res.resources[0] as Config;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main(): Promise<void> {
|
||||||
|
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||||
|
const quality = getEnv('VSCODE_QUALITY');
|
||||||
|
|
||||||
|
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||||
|
const config = await getConfig(client, quality);
|
||||||
|
|
||||||
|
console.log('Quality config:', config);
|
||||||
|
|
||||||
|
if (config.frozen) {
|
||||||
|
console.log(`Skipping release because quality ${quality} is frozen.`);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Releasing build ${commit}...`);
|
||||||
|
|
||||||
|
const scripts = client.database('builds').container(quality).scripts;
|
||||||
|
await scripts.storedProcedure('releaseBuild').execute('', [commit]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Kick off the release and translate the outcome into the process exit code
// (0 on success, 1 on any failure) so the pipeline step reflects it.
main().then(() => {
	console.log('Build successfully released');
	process.exit(0);
}, err => {
	console.error(err);
	process.exit(1);
});
|
||||||
@@ -8,7 +8,7 @@
|
|||||||
import * as url from 'url';
|
import * as url from 'url';
|
||||||
import * as azure from 'azure-storage';
|
import * as azure from 'azure-storage';
|
||||||
import * as mime from 'mime';
|
import * as mime from 'mime';
|
||||||
import { DocumentClient, RetrievedDocument } from 'documentdb';
|
import { CosmosClient } from '@azure/cosmos';
|
||||||
|
|
||||||
function log(...args: any[]) {
|
function log(...args: any[]) {
|
||||||
console.log(...[`[${new Date().toISOString()}]`, ...args]);
|
console.log(...[`[${new Date().toISOString()}]`, ...args]);
|
||||||
@@ -23,7 +23,7 @@ if (process.argv.length < 3) {
|
|||||||
process.exit(-1);
|
process.exit(-1);
|
||||||
}
|
}
|
||||||
|
|
||||||
interface Build extends RetrievedDocument {
|
interface Build {
|
||||||
assets: Asset[];
|
assets: Asset[];
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -38,62 +38,20 @@ interface Asset {
|
|||||||
supportsFastUpdate?: boolean;
|
supportsFastUpdate?: boolean;
|
||||||
}
|
}
|
||||||
|
|
||||||
function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
|
|
||||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
|
||||||
const collection = 'dbs/builds/colls/' + quality;
|
|
||||||
const updateQuery = {
|
|
||||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
|
||||||
parameters: [{ name: '@id', value: commit }]
|
|
||||||
};
|
|
||||||
|
|
||||||
let updateTries = 0;
|
|
||||||
|
|
||||||
function _update(): Promise<void> {
|
|
||||||
updateTries++;
|
|
||||||
|
|
||||||
return new Promise<void>((c, e) => {
|
|
||||||
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
|
|
||||||
if (err) { return e(err); }
|
|
||||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
|
||||||
|
|
||||||
const release = results[0];
|
|
||||||
|
|
||||||
release.assets = [
|
|
||||||
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
|
|
||||||
asset
|
|
||||||
];
|
|
||||||
|
|
||||||
client.replaceDocument(release._self, release, err => {
|
|
||||||
if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
|
|
||||||
if (err) { return e(err); }
|
|
||||||
|
|
||||||
log('Build successfully updated.');
|
|
||||||
c();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
return _update();
|
|
||||||
}
|
|
||||||
|
|
||||||
async function sync(commit: string, quality: string): Promise<void> {
|
async function sync(commit: string, quality: string): Promise<void> {
|
||||||
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
|
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
|
||||||
|
|
||||||
const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||||
const collection = `dbs/builds/colls/${quality}`;
|
const container = client.database('builds').container(quality);
|
||||||
const query = {
|
|
||||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
|
||||||
parameters: [{ name: '@id', value: commit }]
|
|
||||||
};
|
|
||||||
|
|
||||||
const build = await new Promise<Build>((c, e) => {
|
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
|
||||||
cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
|
const res = await container.items.query<Build>(query, {}).fetchAll();
|
||||||
if (err) { return e(err); }
|
|
||||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
if (res.resources.length !== 1) {
|
||||||
c(results[0] as Build);
|
throw new Error(`No builds found for ${commit}`);
|
||||||
});
|
}
|
||||||
});
|
|
||||||
|
const build = res.resources[0];
|
||||||
|
|
||||||
log(`Found build for ${commit}, with ${build.assets.length} assets`);
|
log(`Found build for ${commit}, with ${build.assets.length} assets`);
|
||||||
|
|
||||||
@@ -140,8 +98,9 @@ async function sync(commit: string, quality: string): Promise<void> {
|
|||||||
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
||||||
|
|
||||||
log(` Updating build in DB...`);
|
log(` Updating build in DB...`);
|
||||||
asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
||||||
await updateBuild(commit, quality, asset.platform, asset.type, asset);
|
await container.scripts.storedProcedure('setAssetMooncakeUrl')
|
||||||
|
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]);
|
||||||
|
|
||||||
log(` Done ✔️`);
|
log(` Done ✔️`);
|
||||||
} catch (err) {
|
} catch (err) {
|
||||||
|
|||||||
@@ -1,46 +1,55 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||||
|
inputs:
|
||||||
|
versionSpec: "1.x"
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
vstsFeed: '$(ArtifactFeed)'
|
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
- script: |
|
- script: |
|
||||||
yarn --frozen-lockfile
|
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||||
displayName: Install Dependencies
|
displayName: Install Dependencies
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
vstsFeed: '$(ArtifactFeed)'
|
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
- script: |
|
- script: |
|
||||||
yarn gulp electron-x64
|
yarn electron x64
|
||||||
displayName: Download Electron
|
displayName: Download Electron
|
||||||
- script: |
|
- script: |
|
||||||
yarn gulp hygiene
|
yarn gulp hygiene --skip-tslint
|
||||||
displayName: Run Hygiene Checks
|
displayName: Run Hygiene Checks
|
||||||
- script: |
|
- script: |
|
||||||
yarn monaco-compile-check
|
yarn gulp tslint
|
||||||
displayName: Run Monaco Editor Checks
|
displayName: Run TSLint Checks
|
||||||
|
- script: | # {{SQL CARBON EDIT}} add step
|
||||||
|
yarn strict-null-check
|
||||||
|
displayName: Run Strict Null Check.
|
||||||
|
- script: | # {{SQL CARBON EDIT}} add step
|
||||||
|
yarn tslint
|
||||||
|
displayName: Run TSLint (gci)
|
||||||
|
# - script: | {{SQL CARBON EDIT}} remove step
|
||||||
|
# yarn monaco-compile-check
|
||||||
|
# displayName: Run Monaco Editor Checks
|
||||||
- script: |
|
- script: |
|
||||||
yarn compile
|
yarn compile
|
||||||
displayName: Compile Sources
|
displayName: Compile Sources
|
||||||
- script: |
|
# - script: | {{SQL CARBON EDIT}} remove step
|
||||||
yarn download-builtin-extensions
|
# yarn download-builtin-extensions
|
||||||
displayName: Download Built-in Extensions
|
# displayName: Download Built-in Extensions
|
||||||
- script: |
|
- script: |
|
||||||
./scripts/test.sh --tfs "Unit Tests"
|
./scripts/test.sh --tfs "Unit Tests"
|
||||||
displayName: Run Unit Tests
|
displayName: Run Unit Tests
|
||||||
- script: |
|
# - script: | {{SQL CARBON EDIT}} remove step
|
||||||
./scripts/test-integration.sh --tfs "Integration Tests"
|
# ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||||
displayName: Run Integration Tests
|
# displayName: Run Integration Tests
|
||||||
- task: PublishTestResults@2
|
- task: PublishTestResults@2
|
||||||
displayName: Publish Tests Results
|
displayName: Publish Tests Results
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
14
build/azure-pipelines/darwin/createDrop.sh
Executable file
14
build/azure-pipelines/darwin/createDrop.sh
Executable file
@@ -0,0 +1,14 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -e
|
||||||
|
REPO="$(pwd)"
|
||||||
|
|
||||||
|
# ensure drop directories exist
|
||||||
|
mkdir -p $REPO/.build/darwin/{archive,server}
|
||||||
|
|
||||||
|
# remove pkg from archive
|
||||||
|
zip -d $REPO/.build/darwin/archive/azuredatastudio-darwin.zip "*.pkg"
|
||||||
|
|
||||||
|
# package Remote Extension Host
|
||||||
|
pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $REPO/.build/darwin/server/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd
|
||||||
|
|
||||||
|
node build/azure-pipelines/common/copyArtifacts.js
|
||||||
@@ -21,11 +21,11 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
@@ -102,20 +102,28 @@ steps:
|
|||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
|
# Figure out the full absolute path of the product we just built
|
||||||
|
# including the remote server and configure the integration tests
|
||||||
|
# to run with these builds instead of running out of sources.
|
||||||
set -e
|
set -e
|
||||||
|
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
||||||
|
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||||
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
|
||||||
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||||
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||||
displayName: Run integration tests
|
displayName: Run integration tests
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
|
||||||
set -e
|
# - script: |
|
||||||
cd test/smoke
|
# set -e
|
||||||
yarn compile
|
# cd test/smoke
|
||||||
cd -
|
# yarn compile
|
||||||
yarn smoketest --web --headless
|
# cd -
|
||||||
continueOnError: true
|
# yarn smoketest --web --headless
|
||||||
displayName: Run web smoke tests
|
# continueOnError: true
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
# displayName: Run web smoke tests
|
||||||
|
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
|
|||||||
@@ -5,28 +5,20 @@ set -e
|
|||||||
zip -d ../VSCode-darwin.zip "*.pkg"
|
zip -d ../VSCode-darwin.zip "*.pkg"
|
||||||
|
|
||||||
# publish the build
|
# publish the build
|
||||||
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
|
node build/azure-pipelines/common/createAsset.js \
|
||||||
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
|
|
||||||
node build/azure-pipelines/common/publish.js \
|
|
||||||
"$VSCODE_QUALITY" \
|
|
||||||
darwin \
|
darwin \
|
||||||
archive \
|
archive \
|
||||||
"VSCode-darwin-$VSCODE_QUALITY.zip" \
|
"VSCode-darwin-$VSCODE_QUALITY.zip" \
|
||||||
$VERSION \
|
|
||||||
true \
|
|
||||||
../VSCode-darwin.zip
|
../VSCode-darwin.zip
|
||||||
|
|
||||||
# package Remote Extension Host
|
# package Remote Extension Host
|
||||||
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
|
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
|
||||||
|
|
||||||
# publish Remote Extension Host
|
# publish Remote Extension Host
|
||||||
node build/azure-pipelines/common/publish.js \
|
node build/azure-pipelines/common/createAsset.js \
|
||||||
"$VSCODE_QUALITY" \
|
|
||||||
server-darwin \
|
server-darwin \
|
||||||
archive-unsigned \
|
archive-unsigned \
|
||||||
"vscode-server-darwin.zip" \
|
"vscode-server-darwin.zip" \
|
||||||
$VERSION \
|
|
||||||
true \
|
|
||||||
../vscode-server-darwin.zip
|
../vscode-server-darwin.zip
|
||||||
|
|
||||||
# publish hockeyapp symbols
|
# publish hockeyapp symbols
|
||||||
|
|||||||
169
build/azure-pipelines/darwin/sql-product-build-darwin.yml
Normal file
169
build/azure-pipelines/darwin/sql-product-build-darwin.yml
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
steps:
|
||||||
|
- script: |
|
||||||
|
mkdir -p .build
|
||||||
|
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||||
|
echo -n $VSCODE_QUALITY > .build/quality
|
||||||
|
displayName: Prepare cache flag
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
|
||||||
|
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
platformIndependent: true
|
||||||
|
alias: 'Compilation'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
exit 1
|
||||||
|
displayName: Check RestoreCache
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: '10.15.3'
|
||||||
|
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
|
inputs:
|
||||||
|
versionSpec: '1.x'
|
||||||
|
|
||||||
|
- task: AzureKeyVault@1
|
||||||
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
inputs:
|
||||||
|
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||||
|
KeyVaultName: ado-secrets
|
||||||
|
SecretsFilter: 'github-distro-mixin-password'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
|
||||||
|
cat << EOF > ~/.netrc
|
||||||
|
machine github.com
|
||||||
|
login azuredatastudio
|
||||||
|
password $(github-distro-mixin-password)
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git config user.email "andresse@microsoft.com"
|
||||||
|
git config user.name "AzureDataStudio"
|
||||||
|
displayName: Prepare tooling
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||||
|
git fetch distro
|
||||||
|
git merge $(node -p "require('./package.json').distro")
|
||||||
|
displayName: Merge distro
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||||
|
displayName: Install dependencies
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn postinstall
|
||||||
|
displayName: Run postinstall scripts
|
||||||
|
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
node build/azure-pipelines/mixin
|
||||||
|
displayName: Mix in quality
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp install-sqltoolsservice
|
||||||
|
displayName: Install sqltoolsservice
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp package-rebuild-extensions
|
||||||
|
yarn gulp vscode-darwin-min-ci
|
||||||
|
yarn gulp vscode-reh-darwin-min-ci
|
||||||
|
yarn gulp vscode-reh-web-darwin-min-ci
|
||||||
|
displayName: Build
|
||||||
|
env:
|
||||||
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
|
||||||
|
displayName: Run unit tests
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
mkdir -p .build/darwin/archive
|
||||||
|
pushd ../azuredatastudio-darwin && zip -r -X -y $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip * && popd
|
||||||
|
displayName: 'Archive'
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
displayName: 'ESRP CodeSigning'
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'Code Signing'
|
||||||
|
FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
|
||||||
|
Pattern: 'azuredatastudio-darwin.zip'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-401337-Apple",
|
||||||
|
"operationSetCode": "MacAppDeveloperSign",
|
||||||
|
"parameters": [],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 20
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
./build/azure-pipelines/darwin/createDrop.sh
|
||||||
|
displayName: Create Drop
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: drop'
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: 'Publish Test Results test-results.xml'
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: 'test-results.xml'
|
||||||
|
searchFolder: '$(Build.SourcesDirectory)'
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: 'Publish Integration and Smoke Test Results'
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: 'dawin-integration-tests-results.xml'
|
||||||
|
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: PublishCodeCoverageResults@1
|
||||||
|
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
|
||||||
|
inputs:
|
||||||
|
codeCoverageTool: Cobertura
|
||||||
|
summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
|
||||||
|
reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
|
displayName: 'Component Detection'
|
||||||
|
inputs:
|
||||||
|
failOnAlert: true
|
||||||
19
build/azure-pipelines/darwin/sql-publish.ps1
Normal file
19
build/azure-pipelines/darwin/sql-publish.ps1
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
Param(
|
||||||
|
[string]$sourcesDir,
|
||||||
|
[string]$artifactsDir,
|
||||||
|
[string]$storageKey,
|
||||||
|
[string]$documentDbKey
|
||||||
|
)
|
||||||
|
|
||||||
|
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
|
||||||
|
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
|
||||||
|
|
||||||
|
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
|
||||||
|
$Version = $VersionJson.version
|
||||||
|
$Quality = $VersionJson.quality
|
||||||
|
$CommitId = $VersionJson.commit
|
||||||
|
|
||||||
|
$ZipName = "azuredatastudio-darwin.zip"
|
||||||
|
$Zip = "$artifactsDir\darwin\archive\$ZipName"
|
||||||
|
|
||||||
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
|
||||||
@@ -1,3 +1,6 @@
|
|||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
|
||||||
trigger:
|
trigger:
|
||||||
branches:
|
branches:
|
||||||
include: ['master', 'release/*']
|
include: ['master', 'release/*']
|
||||||
@@ -8,27 +11,27 @@ pr:
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
inputs:
|
inputs:
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
azureSubscription: 'azuredatastudio-adointegration'
|
||||||
KeyVaultName: vscode
|
KeyVaultName: ado-secrets
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
cat << EOF > ~/.netrc
|
cat << EOF > ~/.netrc
|
||||||
machine github.com
|
machine github.com
|
||||||
login vscode
|
login azuredatastudio
|
||||||
password $(github-distro-mixin-password)
|
password $(github-distro-mixin-password)
|
||||||
EOF
|
EOF
|
||||||
|
|
||||||
git config user.email "vscode@microsoft.com"
|
git config user.email "andresse@microsoft.com"
|
||||||
git config user.name "VSCode"
|
git config user.name "AzureDataStudio"
|
||||||
|
|
||||||
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
|
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||||
git fetch distro
|
git fetch distro
|
||||||
|
|
||||||
# Push master branch into oss/master
|
# Push master branch into oss/master
|
||||||
|
|||||||
16
build/azure-pipelines/docker/Dockerfile
Normal file
16
build/azure-pipelines/docker/Dockerfile
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
#Download base image ubuntu 16.04
|
||||||
|
FROM ubuntu:16.04
|
||||||
|
|
||||||
|
# Update Software repository
|
||||||
|
RUN apt-get update
|
||||||
|
|
||||||
|
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0
|
||||||
|
|
||||||
|
ADD ./ /opt/ads-server
|
||||||
|
|
||||||
|
RUN chmod +x /opt/ads-server/server.sh && chmod +x /opt/ads-server/node
|
||||||
|
|
||||||
|
CMD ["/opt/ads-server/server.sh"]
|
||||||
|
|
||||||
|
EXPOSE 8000:8000
|
||||||
|
EXPOSE 8001:8001
|
||||||
@@ -1,10 +1,13 @@
|
|||||||
pool:
|
pool:
|
||||||
vmImage: 'Ubuntu-16.04'
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
|
||||||
|
trigger: none
|
||||||
|
pr: none
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
@@ -31,13 +34,3 @@ steps:
|
|||||||
git push origin HEAD:electron-6.0.x
|
git push origin HEAD:electron-6.0.x
|
||||||
|
|
||||||
displayName: Sync & Merge Exploration
|
displayName: Sync & Merge Exploration
|
||||||
|
|
||||||
trigger: none
|
|
||||||
pr: none
|
|
||||||
|
|
||||||
schedules:
|
|
||||||
- cron: "0 5 * * Mon-Fri"
|
|
||||||
displayName: Mon-Fri at 7:00
|
|
||||||
branches:
|
|
||||||
include:
|
|
||||||
- master
|
|
||||||
|
|||||||
39
build/azure-pipelines/exploration-merge.yml
Normal file
39
build/azure-pipelines/exploration-merge.yml
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
trigger:
|
||||||
|
branches:
|
||||||
|
include: ['master']
|
||||||
|
pr: none
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
- job: ExplorationMerge
|
||||||
|
pool:
|
||||||
|
vmImage: Ubuntu-16.04
|
||||||
|
steps:
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: "10.15.1"
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
|
||||||
|
cat << EOF > ~/.netrc
|
||||||
|
machine mssqltools.visualstudio.com
|
||||||
|
login azuredatastudio
|
||||||
|
password $(DEVOPS_PASSWORD)
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git config user.email "andresse@microsoft.com"
|
||||||
|
git config user.name "AzureDataStudio"
|
||||||
|
|
||||||
|
git remote add explore "$ADS_EXPLORE_REPO"
|
||||||
|
git fetch explore
|
||||||
|
|
||||||
|
git checkout -b merge-branch explore/master
|
||||||
|
|
||||||
|
git merge origin/master
|
||||||
|
|
||||||
|
git push explore HEAD:master
|
||||||
|
|
||||||
|
displayName: Sync & Merge Explore
|
||||||
|
env:
|
||||||
|
ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
|
||||||
|
DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)
|
||||||
20
build/azure-pipelines/linux/Dockerfile
Normal file
20
build/azure-pipelines/linux/Dockerfile
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
#Download base image ubuntu 16.04
|
||||||
|
FROM ubuntu:16.04
|
||||||
|
|
||||||
|
# Update Software repository
|
||||||
|
RUN apt-get update --fix-missing
|
||||||
|
|
||||||
|
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
|
||||||
|
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
|
||||||
|
libnss3 libasound2 make gcc libx11-dev fakeroot rpm
|
||||||
|
|
||||||
|
#docker
|
||||||
|
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||||
|
RUN apt-key fingerprint 0EBFCD88
|
||||||
|
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||||
|
RUN apt-get update
|
||||||
|
RUN apt-get -y install docker-ce docker-ce-cli containerd.io
|
||||||
|
|
||||||
|
ADD ./xvfb.init /etc/init.d/xvfb
|
||||||
|
RUN chmod +x /etc/init.d/xvfb
|
||||||
|
RUN update-rc.d xvfb defaults
|
||||||
@@ -2,53 +2,62 @@ steps:
|
|||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
sudo apt-get update
|
sudo apt-get update
|
||||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
|
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
|
||||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||||
sudo chmod +x /etc/init.d/xvfb
|
sudo chmod +x /etc/init.d/xvfb
|
||||||
sudo update-rc.d xvfb defaults
|
sudo update-rc.d xvfb defaults
|
||||||
sudo service xvfb start
|
sudo service xvfb start
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
|
inputs:
|
||||||
|
versionSpec: "1.x"
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
vstsFeed: '$(ArtifactFeed)'
|
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
|
||||||
inputs:
|
|
||||||
versionSpec: "1.10.1"
|
|
||||||
- script: |
|
- script: |
|
||||||
yarn --frozen-lockfile
|
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||||
displayName: Install Dependencies
|
displayName: Install Dependencies
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
vstsFeed: '$(ArtifactFeed)'
|
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
- script: |
|
- script: |
|
||||||
yarn gulp electron-x64
|
yarn electron x64
|
||||||
displayName: Download Electron
|
displayName: Download Electron
|
||||||
- script: |
|
- script: |
|
||||||
yarn gulp hygiene
|
yarn gulp hygiene --skip-tslint
|
||||||
displayName: Run Hygiene Checks
|
displayName: Run Hygiene Checks
|
||||||
- script: |
|
- script: |
|
||||||
yarn monaco-compile-check
|
yarn gulp tslint
|
||||||
displayName: Run Monaco Editor Checks
|
displayName: Run TSLint Checks
|
||||||
|
- script: | # {{SQL CARBON EDIT}} add gci checks
|
||||||
|
yarn tslint
|
||||||
|
displayName: Run TSLint (gci)
|
||||||
|
- script: | # {{SQL CARBON EDIT}} add strict null check
|
||||||
|
yarn strict-null-check
|
||||||
|
displayName: Run Strict Null Check
|
||||||
|
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
|
||||||
|
# yarn monaco-compile-check
|
||||||
|
# displayName: Run Monaco Editor Checks
|
||||||
- script: |
|
- script: |
|
||||||
yarn compile
|
yarn compile
|
||||||
displayName: Compile Sources
|
displayName: Compile Sources
|
||||||
- script: |
|
# - script: | {{SQL CARBON EDIT}} remove step
|
||||||
yarn download-builtin-extensions
|
# yarn download-builtin-extensions
|
||||||
displayName: Download Built-in Extensions
|
# displayName: Download Built-in Extensions
|
||||||
- script: |
|
- script: |
|
||||||
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
||||||
displayName: Run Unit Tests
|
displayName: Run Unit Tests
|
||||||
- script: |
|
# - script: | {{SQL CARBON EDIT}} remove step
|
||||||
DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
|
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||||
displayName: Run Integration Tests
|
# displayName: Run Integration Tests
|
||||||
- task: PublishTestResults@2
|
- task: PublishTestResults@2
|
||||||
displayName: Publish Tests Results
|
displayName: Publish Tests Results
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
37
build/azure-pipelines/linux/createDrop.sh
Executable file
37
build/azure-pipelines/linux/createDrop.sh
Executable file
@@ -0,0 +1,37 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
set -e
|
||||||
|
REPO="$(pwd)"
|
||||||
|
ROOT="$REPO/.."
|
||||||
|
|
||||||
|
# Publish tarball
|
||||||
|
mkdir -p $REPO/.build/linux/{archive,server}
|
||||||
|
PLATFORM_LINUX="linux-x64"
|
||||||
|
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
|
||||||
|
BUILD="$ROOT/$BUILDNAME"
|
||||||
|
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
|
||||||
|
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
|
||||||
|
|
||||||
|
# create version
|
||||||
|
PACKAGEJSON="$BUILD/resources/app/package.json"
|
||||||
|
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||||
|
COMMIT_ID=$(git rev-parse HEAD)
|
||||||
|
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"
|
||||||
|
|
||||||
|
rm -rf $ROOT/code-*.tar.*
|
||||||
|
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||||
|
|
||||||
|
# Publish Remote Extension Host
|
||||||
|
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
|
||||||
|
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
|
||||||
|
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
|
||||||
|
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
|
||||||
|
|
||||||
|
rm -rf $ROOT/azuredatastudio-server-*.tar.*
|
||||||
|
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||||
|
|
||||||
|
# create docker
|
||||||
|
mkdir -p $REPO/.build/docker
|
||||||
|
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
|
||||||
|
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz
|
||||||
|
|
||||||
|
node build/azure-pipelines/common/copyArtifacts.js
|
||||||
@@ -1,40 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
'use strict';
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const documentdb_1 = require("documentdb");
|
|
||||||
function createDefaultConfig(quality) {
|
|
||||||
return {
|
|
||||||
id: quality,
|
|
||||||
frozen: false
|
|
||||||
};
|
|
||||||
}
|
|
||||||
function getConfig(quality) {
|
|
||||||
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
|
||||||
const collection = 'dbs/builds/colls/config';
|
|
||||||
const query = {
|
|
||||||
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
|
|
||||||
parameters: [
|
|
||||||
{ name: '@quality', value: quality }
|
|
||||||
]
|
|
||||||
};
|
|
||||||
return new Promise((c, e) => {
|
|
||||||
client.queryDocuments(collection, query).toArray((err, results) => {
|
|
||||||
if (err && err.code !== 409) {
|
|
||||||
return e(err);
|
|
||||||
}
|
|
||||||
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
getConfig(process.argv[2])
|
|
||||||
.then(config => {
|
|
||||||
console.log(config.frozen);
|
|
||||||
process.exit(0);
|
|
||||||
})
|
|
||||||
.catch(err => {
|
|
||||||
console.error(err);
|
|
||||||
process.exit(1);
|
|
||||||
});
|
|
||||||
@@ -21,11 +21,11 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
|||||||
@@ -21,11 +21,11 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
@@ -105,12 +105,45 @@ steps:
|
|||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
|
# Figure out the full absolute path of the product we just built
|
||||||
|
# including the remote server and configure the integration tests
|
||||||
|
# to run with these builds instead of running out of sources.
|
||||||
set -e
|
set -e
|
||||||
|
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
|
||||||
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
|
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
|
||||||
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
|
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
|
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
|
||||||
displayName: Run integration tests
|
displayName: Run integration tests
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp "vscode-linux-x64-build-deb"
|
||||||
|
yarn gulp "vscode-linux-x64-build-rpm"
|
||||||
|
yarn gulp "vscode-linux-x64-prepare-snap"
|
||||||
|
displayName: Build packages
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'ESRP CodeSign'
|
||||||
|
FolderPath: '.build/linux/rpm/x86_64'
|
||||||
|
Pattern: '*.rpm'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-450779-Pgp",
|
||||||
|
"operationSetCode": "LinuxSign",
|
||||||
|
"parameters": [ ],
|
||||||
|
"toolName": "sign",
|
||||||
|
"toolVersion": "1.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 120
|
||||||
|
displayName: Codesign rpm
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
|
|||||||
@@ -10,13 +10,11 @@ BUILD="$ROOT/$BUILDNAME"
|
|||||||
BUILD_VERSION="$(date +%s)"
|
BUILD_VERSION="$(date +%s)"
|
||||||
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
||||||
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
||||||
PACKAGEJSON="$BUILD/resources/app/package.json"
|
|
||||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
|
||||||
|
|
||||||
rm -rf $ROOT/code-*.tar.*
|
rm -rf $ROOT/code-*.tar.*
|
||||||
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
|
node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"
|
||||||
|
|
||||||
# Publish Remote Extension Host
|
# Publish Remote Extension Host
|
||||||
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
|
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
|
||||||
@@ -27,32 +25,28 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
|
|||||||
rm -rf $ROOT/vscode-server-*.tar.*
|
rm -rf $ROOT/vscode-server-*.tar.*
|
||||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
|
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
|
||||||
|
|
||||||
# Publish hockeyapp symbols
|
# Publish hockeyapp symbols
|
||||||
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
|
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"
|
||||||
|
|
||||||
# Publish DEB
|
# Publish DEB
|
||||||
yarn gulp "vscode-linux-x64-build-deb"
|
|
||||||
PLATFORM_DEB="linux-deb-x64"
|
PLATFORM_DEB="linux-deb-x64"
|
||||||
DEB_ARCH="amd64"
|
DEB_ARCH="amd64"
|
||||||
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
||||||
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
|
node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"
|
||||||
|
|
||||||
# Publish RPM
|
# Publish RPM
|
||||||
yarn gulp "vscode-linux-x64-build-rpm"
|
|
||||||
PLATFORM_RPM="linux-rpm-x64"
|
PLATFORM_RPM="linux-rpm-x64"
|
||||||
RPM_ARCH="x86_64"
|
RPM_ARCH="x86_64"
|
||||||
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
||||||
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
|
node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"
|
||||||
|
|
||||||
# Publish Snap
|
# Publish Snap
|
||||||
yarn gulp "vscode-linux-x64-prepare-snap"
|
|
||||||
|
|
||||||
# Pack snap tarball artifact, in order to preserve file perms
|
# Pack snap tarball artifact, in order to preserve file perms
|
||||||
mkdir -p $REPO/.build/linux/snap-tarball
|
mkdir -p $REPO/.build/linux/snap-tarball
|
||||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
|
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
@@ -43,12 +43,10 @@ steps:
|
|||||||
# Create snap package
|
# Create snap package
|
||||||
BUILD_VERSION="$(date +%s)"
|
BUILD_VERSION="$(date +%s)"
|
||||||
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
|
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
|
||||||
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
|
|
||||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
|
||||||
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
|
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
|
||||||
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")
|
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")
|
||||||
|
|
||||||
# Publish snap package
|
# Publish snap package
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
|
node build/azure-pipelines/common/createAsset.js "linux-snap-x64" package "$SNAP_FILENAME" "$SNAP_PATH"
|
||||||
|
|||||||
170
build/azure-pipelines/linux/sql-product-build-linux.yml
Normal file
170
build/azure-pipelines/linux/sql-product-build-linux.yml
Normal file
@@ -0,0 +1,170 @@
|
|||||||
|
steps:
|
||||||
|
- script: |
|
||||||
|
mkdir -p .build
|
||||||
|
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||||
|
echo -n $VSCODE_QUALITY > .build/quality
|
||||||
|
displayName: Prepare cache flag
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
|
||||||
|
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
platformIndependent: true
|
||||||
|
alias: 'Compilation'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
exit 1
|
||||||
|
displayName: Check RestoreCache
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: '10.15.1'
|
||||||
|
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
|
inputs:
|
||||||
|
versionSpec: "1.x"
|
||||||
|
|
||||||
|
- task: AzureKeyVault@1
|
||||||
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
inputs:
|
||||||
|
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||||
|
KeyVaultName: ado-secrets
|
||||||
|
SecretsFilter: 'github-distro-mixin-password'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
cat << EOF > ~/.netrc
|
||||||
|
machine github.com
|
||||||
|
login azuredatastudio
|
||||||
|
password $(github-distro-mixin-password)
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git config user.email "andresse@microsoft.com"
|
||||||
|
git config user.name "AzureDataStudio"
|
||||||
|
displayName: Prepare tooling
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||||
|
git fetch distro
|
||||||
|
git merge $(node -p "require('./package.json').distro")
|
||||||
|
displayName: Merge distro
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||||
|
displayName: Install dependencies
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn postinstall
|
||||||
|
displayName: Run postinstall scripts
|
||||||
|
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
node build/azure-pipelines/mixin
|
||||||
|
displayName: Mix in quality
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp install-sqltoolsservice
|
||||||
|
yarn gulp install-ssmsmin
|
||||||
|
displayName: Install extension binaries
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp vscode-linux-x64-min-ci
|
||||||
|
yarn gulp vscode-reh-linux-x64-min-ci
|
||||||
|
yarn gulp vscode-reh-web-linux-x64-min-ci
|
||||||
|
displayName: Build
|
||||||
|
env:
|
||||||
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
service xvfb start
|
||||||
|
displayName: Start xvfb
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp package-rebuild-extensions
|
||||||
|
yarn gulp compile-extensions
|
||||||
|
yarn gulp package-external-extensions
|
||||||
|
displayName: Package External extensions
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||||
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
|
DISPLAY=:10 ./scripts/test-extensions-unit.sh
|
||||||
|
displayName: 'Run Stable Extension Unit Tests'
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
|
||||||
|
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
|
||||||
|
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
|
||||||
|
DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
|
||||||
|
displayName: 'Run Unstable Extension Unit Tests'
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp vscode-linux-x64-build-deb
|
||||||
|
displayName: Build Deb
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp vscode-linux-x64-build-rpm
|
||||||
|
displayName: Build Rpm
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
./build/azure-pipelines/linux/createDrop.sh
|
||||||
|
displayName: Create Drop
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: drop'
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: 'Publish Test Results test-results.xml'
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: 'test-results.xml'
|
||||||
|
searchFolder: '$(Build.SourcesDirectory)'
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: PublishCodeCoverageResults@1
|
||||||
|
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
|
||||||
|
inputs:
|
||||||
|
codeCoverageTool: Cobertura
|
||||||
|
summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
|
||||||
|
reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
|
||||||
|
continueOnError: true
|
||||||
|
|
||||||
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
|
displayName: 'Component Detection'
|
||||||
|
inputs:
|
||||||
|
failOnAlert: true
|
||||||
36
build/azure-pipelines/linux/sql-publish.ps1
Normal file
36
build/azure-pipelines/linux/sql-publish.ps1
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
Param(
|
||||||
|
[string]$sourcesDir,
|
||||||
|
[string]$artifactsDir,
|
||||||
|
[string]$storageKey,
|
||||||
|
[string]$documentDbKey
|
||||||
|
)
|
||||||
|
|
||||||
|
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
|
||||||
|
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
|
||||||
|
|
||||||
|
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
|
||||||
|
$Version = $VersionJson.version
|
||||||
|
$Quality = $VersionJson.quality
|
||||||
|
$CommitId = $VersionJson.commit
|
||||||
|
$Arch = "x64"
|
||||||
|
|
||||||
|
# Publish tarball
|
||||||
|
$PlatformLinux = "linux-$Arch"
|
||||||
|
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
|
||||||
|
$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"
|
||||||
|
|
||||||
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId
|
||||||
|
|
||||||
|
# Publish DEB
|
||||||
|
$PlatformDeb = "linux-deb-$Arch"
|
||||||
|
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
|
||||||
|
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"
|
||||||
|
|
||||||
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId
|
||||||
|
|
||||||
|
# Publish RPM
|
||||||
|
$PlatformRpm = "linux-rpm-$Arch"
|
||||||
|
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
|
||||||
|
$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"
|
||||||
|
|
||||||
|
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId
|
||||||
@@ -21,7 +21,7 @@ function main() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
const productJsonFilter = filter('product.json', { restore: true });
|
const productJsonFilter = filter('**/product.json', { restore: true });
|
||||||
|
|
||||||
fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
|
fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
|
||||||
return vfs
|
return vfs
|
||||||
@@ -29,7 +29,7 @@ function main() {
|
|||||||
.pipe(filter(f => !f.isDirectory()))
|
.pipe(filter(f => !f.isDirectory()))
|
||||||
.pipe(productJsonFilter)
|
.pipe(productJsonFilter)
|
||||||
.pipe(buffer())
|
.pipe(buffer())
|
||||||
.pipe(json(o => Object.assign({}, require('../product.json'), o)))
|
.pipe(json(o => Object.assign({}, require('../../product.json'), o)))
|
||||||
.pipe(productJsonFilter.restore)
|
.pipe(productJsonFilter.restore)
|
||||||
.pipe(es.mapSync(function (f) {
|
.pipe(es.mapSync(function (f) {
|
||||||
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
|
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
|
||||||
@@ -38,4 +38,4 @@ function main() {
|
|||||||
.pipe(vfs.dest('.'));
|
.pipe(vfs.dest('.'));
|
||||||
}
|
}
|
||||||
|
|
||||||
main();
|
main();
|
||||||
|
|||||||
@@ -56,7 +56,7 @@ jobs:
|
|||||||
- template: linux/snap-build-linux.yml
|
- template: linux/snap-build-linux.yml
|
||||||
|
|
||||||
- job: LinuxArmhf
|
- job: LinuxArmhf
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
|
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
|
||||||
pool:
|
pool:
|
||||||
vmImage: 'Ubuntu-16.04'
|
vmImage: 'Ubuntu-16.04'
|
||||||
variables:
|
variables:
|
||||||
@@ -67,7 +67,7 @@ jobs:
|
|||||||
- template: linux/product-build-linux-multiarch.yml
|
- template: linux/product-build-linux-multiarch.yml
|
||||||
|
|
||||||
- job: LinuxArm64
|
- job: LinuxArm64
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
|
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
|
||||||
pool:
|
pool:
|
||||||
vmImage: 'Ubuntu-16.04'
|
vmImage: 'Ubuntu-16.04'
|
||||||
variables:
|
variables:
|
||||||
@@ -78,7 +78,7 @@ jobs:
|
|||||||
- template: linux/product-build-linux-multiarch.yml
|
- template: linux/product-build-linux-multiarch.yml
|
||||||
|
|
||||||
- job: LinuxAlpine
|
- job: LinuxAlpine
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
|
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
|
||||||
pool:
|
pool:
|
||||||
vmImage: 'Ubuntu-16.04'
|
vmImage: 'Ubuntu-16.04'
|
||||||
variables:
|
variables:
|
||||||
@@ -118,6 +118,7 @@ jobs:
|
|||||||
- Linux
|
- Linux
|
||||||
- LinuxSnap
|
- LinuxSnap
|
||||||
- LinuxArmhf
|
- LinuxArmhf
|
||||||
|
- LinuxArm64
|
||||||
- LinuxAlpine
|
- LinuxAlpine
|
||||||
- macOS
|
- macOS
|
||||||
steps:
|
steps:
|
||||||
@@ -133,6 +134,7 @@ jobs:
|
|||||||
- Linux
|
- Linux
|
||||||
- LinuxSnap
|
- LinuxSnap
|
||||||
- LinuxArmhf
|
- LinuxArmhf
|
||||||
|
- LinuxArm64
|
||||||
- LinuxAlpine
|
- LinuxAlpine
|
||||||
- LinuxWeb
|
- LinuxWeb
|
||||||
- macOS
|
- macOS
|
||||||
|
|||||||
@@ -12,23 +12,24 @@ steps:
|
|||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
platformIndependent: true
|
platformIndependent: true
|
||||||
alias: 'Compilation'
|
alias: 'Compilation'
|
||||||
|
dryRun: true
|
||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
inputs:
|
inputs:
|
||||||
azureSubscription: 'vscode-builds-subscription'
|
azureSubscription: 'vscode-builds-subscription'
|
||||||
KeyVaultName: vscode
|
KeyVaultName: vscode
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
@@ -41,7 +42,7 @@ steps:
|
|||||||
git config user.email "vscode@microsoft.com"
|
git config user.email "vscode@microsoft.com"
|
||||||
git config user.name "VSCode"
|
git config user.name "VSCode"
|
||||||
displayName: Prepare tooling
|
displayName: Prepare tooling
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
@@ -49,33 +50,33 @@ steps:
|
|||||||
git fetch distro
|
git fetch distro
|
||||||
git merge $(node -p "require('./package.json').distro")
|
git merge $(node -p "require('./package.json').distro")
|
||||||
displayName: Merge distro
|
displayName: Merge distro
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||||
displayName: Install dependencies
|
displayName: Install dependencies
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn postinstall
|
yarn postinstall
|
||||||
displayName: Run postinstall scripts
|
displayName: Run postinstall scripts
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
# Mixin must run before optimize, because the CSS loader will
|
# Mixin must run before optimize, because the CSS loader will
|
||||||
# inline small SVGs
|
# inline small SVGs
|
||||||
@@ -83,20 +84,28 @@ steps:
|
|||||||
set -e
|
set -e
|
||||||
node build/azure-pipelines/mixin
|
node build/azure-pipelines/mixin
|
||||||
displayName: Mix in quality
|
displayName: Mix in quality
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
yarn gulp hygiene
|
yarn gulp hygiene --skip-tslint
|
||||||
|
yarn gulp tslint
|
||||||
yarn monaco-compile-check
|
yarn monaco-compile-check
|
||||||
displayName: Run hygiene checks
|
displayName: Run hygiene, tslint and monaco compile checks
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -
|
set -
|
||||||
./build/azure-pipelines/common/extract-telemetry.sh
|
./build/azure-pipelines/common/extract-telemetry.sh
|
||||||
displayName: Extract Telemetry
|
displayName: Extract Telemetry
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
|
||||||
|
./build/azure-pipelines/common/publish-webview.sh
|
||||||
|
displayName: Publish Webview
|
||||||
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
@@ -106,14 +115,22 @@ steps:
|
|||||||
yarn gulp minify-vscode-reh
|
yarn gulp minify-vscode-reh
|
||||||
yarn gulp minify-vscode-reh-web
|
yarn gulp minify-vscode-reh-web
|
||||||
displayName: Compile
|
displayName: Compile
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- script: |
|
- script: |
|
||||||
set -e
|
set -e
|
||||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||||
node build/azure-pipelines/upload-sourcemaps
|
node build/azure-pipelines/upload-sourcemaps
|
||||||
displayName: Upload sourcemaps
|
displayName: Upload sourcemaps
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
VERSION=`node -p "require(\"./package.json\").version"`
|
||||||
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
|
node build/azure-pipelines/common/createBuild.js $VERSION
|
||||||
|
displayName: Create build
|
||||||
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
inputs:
|
inputs:
|
||||||
@@ -122,4 +139,4 @@ steps:
|
|||||||
vstsFeed: 'npm-vscode'
|
vstsFeed: 'npm-vscode'
|
||||||
platformIndependent: true
|
platformIndependent: true
|
||||||
alias: 'Compilation'
|
alias: 'Compilation'
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
|
||||||
|
|||||||
@@ -1,36 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
'use strict';
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const cp = require("child_process");
|
|
||||||
let tag = '';
|
|
||||||
try {
|
|
||||||
tag = cp
|
|
||||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
|
||||||
.toString()
|
|
||||||
.trim();
|
|
||||||
if (!isValidTag(tag)) {
|
|
||||||
throw Error(`Invalid tag ${tag}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
console.error(err);
|
|
||||||
console.error('Failed to update types');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
function isValidTag(t) {
|
|
||||||
if (t.split('.').length !== 3) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
const [major, minor, bug] = t.split('.');
|
|
||||||
// Only release for tags like 1.34.0
|
|
||||||
if (bug !== '0') {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
@@ -35,9 +35,9 @@ function isValidTag(t: string) {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
|
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -9,11 +9,27 @@ pr: none
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
|
- bash: |
|
||||||
|
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
||||||
|
CHANNEL="G1C14HJ2F"
|
||||||
|
|
||||||
|
if [ "$TAG_VERSION" == "1.999.0" ]; then
|
||||||
|
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."
|
||||||
|
|
||||||
|
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||||
|
-H 'Content-type: application/json; charset=utf-8' \
|
||||||
|
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
|
||||||
|
https://slack.com/api/chat.postMessage
|
||||||
|
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
displayName: Check 1.999.0 tag
|
||||||
|
|
||||||
- bash: |
|
- bash: |
|
||||||
# Install build dependencies
|
# Install build dependencies
|
||||||
|
|||||||
@@ -1,62 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
'use strict';
|
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
|
||||||
const fs = require("fs");
|
|
||||||
const cp = require("child_process");
|
|
||||||
const path = require("path");
|
|
||||||
let tag = '';
|
|
||||||
try {
|
|
||||||
tag = cp
|
|
||||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
|
||||||
.toString()
|
|
||||||
.trim();
|
|
||||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
|
||||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
|
||||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
|
||||||
updateDTSFile(outPath, tag);
|
|
||||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
console.error(err);
|
|
||||||
console.error('Failed to update types');
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
function updateDTSFile(outPath, tag) {
|
|
||||||
const oldContent = fs.readFileSync(outPath, 'utf-8');
|
|
||||||
const newContent = getNewFileContent(oldContent, tag);
|
|
||||||
fs.writeFileSync(outPath, newContent);
|
|
||||||
}
|
|
||||||
function getNewFileContent(content, tag) {
|
|
||||||
const oldheader = [
|
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
|
||||||
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
|
|
||||||
` *--------------------------------------------------------------------------------------------*/`
|
|
||||||
].join('\n');
|
|
||||||
return getNewFileHeader(tag) + content.slice(oldheader.length);
|
|
||||||
}
|
|
||||||
function getNewFileHeader(tag) {
|
|
||||||
const [major, minor] = tag.split('.');
|
|
||||||
const shorttag = `${major}.${minor}`;
|
|
||||||
const header = [
|
|
||||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
|
||||||
`// Project: https://github.com/microsoft/vscode`,
|
|
||||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
|
||||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
|
||||||
``,
|
|
||||||
`/*---------------------------------------------------------------------------------------------`,
|
|
||||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
|
||||||
` * Licensed under the Source EULA.`,
|
|
||||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
|
||||||
` *--------------------------------------------------------------------------------------------*/`,
|
|
||||||
``,
|
|
||||||
`/**`,
|
|
||||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
|
||||||
` * See https://code.visualstudio.com/api for more information`,
|
|
||||||
` */`
|
|
||||||
].join('\n');
|
|
||||||
return header;
|
|
||||||
}
|
|
||||||
@@ -19,4 +19,4 @@ steps:
|
|||||||
(cd build ; yarn)
|
(cd build ; yarn)
|
||||||
|
|
||||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||||
node build/azure-pipelines/common/release.js
|
node build/azure-pipelines/common/releaseBuild.js
|
||||||
|
|||||||
73
build/azure-pipelines/sql-product-build.yml
Normal file
73
build/azure-pipelines/sql-product-build.yml
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
resources:
|
||||||
|
containers:
|
||||||
|
- container: linux-x64
|
||||||
|
image: sqltoolscontainers.azurecr.io/linux-build-agent:x64
|
||||||
|
endpoint: ContainerRegistry
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
- job: Compile
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
container: linux-x64
|
||||||
|
steps:
|
||||||
|
- template: sql-product-compile.yml
|
||||||
|
|
||||||
|
- job: macOS
|
||||||
|
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
|
||||||
|
pool:
|
||||||
|
vmImage: macOS 10.13
|
||||||
|
dependsOn:
|
||||||
|
- Compile
|
||||||
|
steps:
|
||||||
|
- template: darwin/sql-product-build-darwin.yml
|
||||||
|
|
||||||
|
- job: Linux
|
||||||
|
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
container: linux-x64
|
||||||
|
dependsOn:
|
||||||
|
- Compile
|
||||||
|
steps:
|
||||||
|
- template: linux/sql-product-build-linux.yml
|
||||||
|
|
||||||
|
- job: Windows
|
||||||
|
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
|
||||||
|
pool:
|
||||||
|
vmImage: VS2017-Win2016
|
||||||
|
dependsOn:
|
||||||
|
- Compile
|
||||||
|
steps:
|
||||||
|
- template: win32/sql-product-build-win32.yml
|
||||||
|
|
||||||
|
- job: Windows_Test
|
||||||
|
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
|
||||||
|
pool:
|
||||||
|
name: mssqltools
|
||||||
|
dependsOn:
|
||||||
|
- Linux
|
||||||
|
- Windows
|
||||||
|
steps:
|
||||||
|
- template: win32/sql-product-test-win32.yml
|
||||||
|
|
||||||
|
- job: Release
|
||||||
|
condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
|
||||||
|
pool:
|
||||||
|
vmImage: 'Ubuntu-16.04'
|
||||||
|
dependsOn:
|
||||||
|
- macOS
|
||||||
|
- Linux
|
||||||
|
- Windows
|
||||||
|
- Windows_Test
|
||||||
|
steps:
|
||||||
|
- template: sql-release.yml
|
||||||
|
|
||||||
|
trigger: none
|
||||||
|
pr: none
|
||||||
|
|
||||||
|
schedules:
|
||||||
|
- cron: "0 5 * * Mon-Fri"
|
||||||
|
displayName: Mon-Fri at 7:00
|
||||||
|
branches:
|
||||||
|
include:
|
||||||
|
- master
|
||||||
112
build/azure-pipelines/sql-product-compile.yml
Normal file
112
build/azure-pipelines/sql-product-compile.yml
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
steps:
|
||||||
|
- script: |
|
||||||
|
mkdir -p .build
|
||||||
|
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||||
|
echo -n $VSCODE_QUALITY > .build/quality
|
||||||
|
displayName: Prepare cache flag
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
|
||||||
|
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
platformIndependent: true
|
||||||
|
alias: 'Compilation'
|
||||||
|
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: "10.15.1"
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
|
inputs:
|
||||||
|
versionSpec: "1.x"
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: AzureKeyVault@1
|
||||||
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
inputs:
|
||||||
|
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||||
|
KeyVaultName: ado-secrets
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
cat << EOF > ~/.netrc
|
||||||
|
machine github.com
|
||||||
|
login azuredatastudio
|
||||||
|
password $(github-distro-mixin-password)
|
||||||
|
EOF
|
||||||
|
|
||||||
|
git config user.email "andresse@microsoft.com"
|
||||||
|
git config user.name "AzureDataStudio"
|
||||||
|
displayName: Prepare tooling
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||||
|
git fetch distro
|
||||||
|
git merge $(node -p "require('./package.json').distro")
|
||||||
|
displayName: Merge distro
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||||
|
displayName: Install dependencies
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn postinstall
|
||||||
|
displayName: Run postinstall scripts
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
# Mixin must run before optimize, because the CSS loader will
|
||||||
|
# inline small SVGs
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
node build/azure-pipelines/mixin
|
||||||
|
displayName: Mix in quality
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp hygiene --skip-tslint
|
||||||
|
yarn gulp tslint
|
||||||
|
displayName: Run hygiene, tslint
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
yarn gulp compile-build
|
||||||
|
yarn gulp compile-extensions-build
|
||||||
|
yarn gulp minify-vscode
|
||||||
|
yarn gulp minify-vscode-reh
|
||||||
|
yarn gulp minify-vscode-reh-web
|
||||||
|
displayName: Compile
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
|
||||||
|
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
platformIndependent: true
|
||||||
|
alias: 'Compilation'
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
5
build/azure-pipelines/sql-release.yml
Normal file
5
build/azure-pipelines/sql-release.yml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
steps:
|
||||||
|
- script: |
|
||||||
|
set -e
|
||||||
|
echo "##vso[build.addbuildtag]Release"
|
||||||
|
displayName: Set For Release
|
||||||
@@ -1,11 +1,11 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
|||||||
@@ -21,11 +21,11 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- task: AzureKeyVault@1
|
- task: AzureKeyVault@1
|
||||||
displayName: 'Azure Key Vault: Get Secrets'
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
|||||||
@@ -7,12 +7,9 @@ ROOT="$REPO/.."
|
|||||||
WEB_BUILD_NAME="vscode-web"
|
WEB_BUILD_NAME="vscode-web"
|
||||||
WEB_TARBALL_FILENAME="vscode-web.tar.gz"
|
WEB_TARBALL_FILENAME="vscode-web.tar.gz"
|
||||||
WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
|
WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
|
||||||
BUILD="$ROOT/$WEB_BUILD_NAME"
|
|
||||||
PACKAGEJSON="$BUILD/package.json"
|
|
||||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
|
||||||
|
|
||||||
rm -rf $ROOT/vscode-web.tar.*
|
rm -rf $ROOT/vscode-web.tar.*
|
||||||
|
|
||||||
(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)
|
(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)
|
||||||
|
|
||||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "web-standalone" archive-unsigned "$WEB_TARBALL_FILENAME" "$VERSION" true "$WEB_TARBALL_PATH"
|
node build/azure-pipelines/common/createAsset.js web-standalone archive-unsigned "$WEB_TARBALL_FILENAME" "$WEB_TARBALL_PATH"
|
||||||
|
|||||||
@@ -1,50 +1,60 @@
|
|||||||
steps:
|
steps:
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: '2.x'
|
versionSpec: '2.x'
|
||||||
addToPath: true
|
addToPath: true
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
vstsFeed: '$(ArtifactFeed)'
|
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||||
- powershell: |
|
- powershell: |
|
||||||
yarn --frozen-lockfile
|
yarn --frozen-lockfile
|
||||||
|
env:
|
||||||
|
CHILD_CONCURRENCY: "1"
|
||||||
displayName: Install Dependencies
|
displayName: Install Dependencies
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
inputs:
|
inputs:
|
||||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
vstsFeed: '$(ArtifactFeed)'
|
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
- powershell: |
|
- powershell: |
|
||||||
yarn gulp electron
|
yarn electron
|
||||||
displayName: Download Electron
|
- script: |
|
||||||
- powershell: |
|
yarn gulp hygiene --skip-tslint
|
||||||
yarn gulp hygiene
|
|
||||||
displayName: Run Hygiene Checks
|
displayName: Run Hygiene Checks
|
||||||
- powershell: |
|
- script: |
|
||||||
yarn monaco-compile-check
|
yarn gulp tslint
|
||||||
displayName: Run Monaco Editor Checks
|
displayName: Run TSLint Checks
|
||||||
|
- script: | # {{SQL CARBON EDIT}} add step
|
||||||
|
yarn tslint
|
||||||
|
displayName: Run TSLint (gci)
|
||||||
|
- script: | # {{SQL CARBON EDIT}} add step
|
||||||
|
yarn strict-null-check
|
||||||
|
displayName: Run Strict Null Check
|
||||||
|
# - powershell: | {{SQL CARBON EDIT}} remove step
|
||||||
|
# yarn monaco-compile-check
|
||||||
|
# displayName: Run Monaco Editor Checks
|
||||||
- powershell: |
|
- powershell: |
|
||||||
yarn compile
|
yarn compile
|
||||||
displayName: Compile Sources
|
displayName: Compile Sources
|
||||||
- powershell: |
|
# - powershell: | {{SQL CARBON EDIT}} remove step
|
||||||
yarn download-builtin-extensions
|
# yarn download-builtin-extensions
|
||||||
displayName: Download Built-in Extensions
|
# displayName: Download Built-in Extensions
|
||||||
- powershell: |
|
- powershell: |
|
||||||
.\scripts\test.bat --tfs "Unit Tests"
|
.\scripts\test.bat --tfs "Unit Tests"
|
||||||
displayName: Run Unit Tests
|
displayName: Run Unit Tests
|
||||||
- powershell: |
|
# - powershell: | {{SQL CARBON EDIT}} remove step
|
||||||
.\scripts\test-integration.bat --tfs "Integration Tests"
|
# .\scripts\test-integration.bat --tfs "Integration Tests"
|
||||||
displayName: Run Integration Tests
|
# displayName: Run Integration Tests
|
||||||
- task: PublishTestResults@2
|
- task: PublishTestResults@2
|
||||||
displayName: Publish Tests Results
|
displayName: Publish Tests Results
|
||||||
inputs:
|
inputs:
|
||||||
|
|||||||
20
build/azure-pipelines/win32/createDrop.ps1
Normal file
20
build/azure-pipelines/win32/createDrop.ps1
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
|
||||||
|
$Arch = "x64"
|
||||||
|
|
||||||
|
$Repo = "$(pwd)"
|
||||||
|
$Root = "$Repo\.."
|
||||||
|
$LegacyServer = "$Root\azuredatastudio-reh-win32-$Arch"
|
||||||
|
$ServerName = "azuredatastudio-server-win32-$Arch"
|
||||||
|
$Server = "$Root\$ServerName"
|
||||||
|
$ServerZipLocation = "$Repo\.build\win32-$Arch\server"
|
||||||
|
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"
|
||||||
|
|
||||||
|
# Create server archive
|
||||||
|
New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
|
||||||
|
$global:LASTEXITCODE = 0
|
||||||
|
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
|
||||||
|
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"
|
||||||
|
|
||||||
|
exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
|
||||||
@@ -21,11 +21,11 @@ steps:
|
|||||||
|
|
||||||
- task: NodeTool@0
|
- task: NodeTool@0
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "10.15.1"
|
versionSpec: "12.13.0"
|
||||||
|
|
||||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||||
inputs:
|
inputs:
|
||||||
versionSpec: "1.10.1"
|
versionSpec: "1.x"
|
||||||
|
|
||||||
- task: UsePythonVersion@0
|
- task: UsePythonVersion@0
|
||||||
inputs:
|
inputs:
|
||||||
@@ -107,16 +107,21 @@ steps:
|
|||||||
- powershell: |
|
- powershell: |
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
|
exec { yarn electron $(VSCODE_ARCH) }
|
||||||
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
|
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
|
||||||
displayName: Run unit tests
|
displayName: Run unit tests
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
- powershell: |
|
- powershell: |
|
||||||
|
# Figure out the full absolute path of the product we just built
|
||||||
|
# including the remote server and configure the integration tests
|
||||||
|
# to run with these builds instead of running out of sources.
|
||||||
. build/azure-pipelines/win32/exec.ps1
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
$ErrorActionPreference = "Stop"
|
$ErrorActionPreference = "Stop"
|
||||||
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
|
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
|
||||||
exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
||||||
|
$AppNameShort = $AppProductJson.nameShort
|
||||||
|
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
|
||||||
displayName: Run integration tests
|
displayName: Run integration tests
|
||||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||||
|
|
||||||
|
|||||||
@@ -23,14 +23,13 @@ exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
|
|||||||
# get version
|
# get version
|
||||||
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
|
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
|
||||||
$Version = $PackageJson.version
|
$Version = $PackageJson.version
|
||||||
$Quality = "$env:VSCODE_QUALITY"
|
|
||||||
|
|
||||||
$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }
|
$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }
|
||||||
|
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
|
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Zip }
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
|
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $SystemExe }
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
|
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
|
||||||
exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
|
exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }
|
||||||
|
|
||||||
# publish hockeyapp symbols
|
# publish hockeyapp symbols
|
||||||
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
|
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
|
||||||
|
|||||||
278
build/azure-pipelines/win32/sql-product-build-win32.yml
Normal file
278
build/azure-pipelines/win32/sql-product-build-win32.yml
Normal file
@@ -0,0 +1,278 @@
|
|||||||
|
steps:
|
||||||
|
- powershell: |
|
||||||
|
mkdir .build -ea 0
|
||||||
|
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
|
||||||
|
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
|
||||||
|
displayName: Prepare cache flag
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
|
||||||
|
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
platformIndependent: true
|
||||||
|
alias: 'Compilation'
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exit 1
|
||||||
|
displayName: Check RestoreCache
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||||
|
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: "10.15.1"
|
||||||
|
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
|
inputs:
|
||||||
|
versionSpec: "1.x"
|
||||||
|
|
||||||
|
- task: UsePythonVersion@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: '2.x'
|
||||||
|
addToPath: true
|
||||||
|
|
||||||
|
- task: AzureKeyVault@1
|
||||||
|
displayName: 'Azure Key Vault: Get Secrets'
|
||||||
|
inputs:
|
||||||
|
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||||
|
KeyVaultName: ado-secrets
|
||||||
|
SecretsFilter: 'github-distro-mixin-password'
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
"machine github.com`nlogin azuredatastudio`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
|
||||||
|
|
||||||
|
exec { git config user.email "andresse@microsoft.com" }
|
||||||
|
exec { git config user.name "AzureDataStudio" }
|
||||||
|
displayName: Prepare tooling
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||||
|
git fetch distro
|
||||||
|
git merge $(node -p "require('./package.json').distro")
|
||||||
|
displayName: Merge distro
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
$env:CHILD_CONCURRENCY="1"
|
||||||
|
exec { yarn --frozen-lockfile }
|
||||||
|
displayName: Install dependencies
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||||
|
inputs:
|
||||||
|
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||||
|
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||||
|
vstsFeed: 'BuildCache'
|
||||||
|
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn postinstall }
|
||||||
|
displayName: Run postinstall scripts
|
||||||
|
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { node build/azure-pipelines/mixin }
|
||||||
|
displayName: Mix in quality
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn gulp "install-sqltoolsservice" }
|
||||||
|
displayName: Install sqltoolsservice
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn gulp "package-rebuild-extensions" }
|
||||||
|
exec { yarn gulp "vscode-win32-x64-min-ci" }
|
||||||
|
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
|
||||||
|
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
|
||||||
|
displayName: Build
|
||||||
|
env:
|
||||||
|
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { .\scripts\test-unstable.bat --build --coverage --reporter mocha-junit-reporter }
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
|
||||||
|
displayName: Run unstable tests
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
displayName: 'Sign out code'
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'Code Signing'
|
||||||
|
FolderPath: '$(agent.builddirectory)/azuredatastudio-win32-x64'
|
||||||
|
Pattern: '*.exe,*.node,resources/app/node_modules.asar.unpacked/*.dll,swiftshader/*.dll,d3dcompiler_47.dll,libGLESv2.dll,ffmpeg.dll,libEGL.dll,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,Microsoft.SqlServer.*.dll,Microsoft.Data.Tools.Sql.BatchParser.dll'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolSign",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "OpusName",
|
||||||
|
"parameterValue": "Azure Data Studio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "OpusInfo",
|
||||||
|
"parameterValue": "https://github.com/microsoft/azuredatastudio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "PageHash",
|
||||||
|
"parameterValue": "/NPH"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "FileDigest",
|
||||||
|
"parameterValue": "/fd sha256"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "TimeStamp",
|
||||||
|
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolVerify",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "VerifyAll",
|
||||||
|
"parameterValue": "/all"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 600
|
||||||
|
MaxConcurrency: 5
|
||||||
|
MaxRetryAttempts: 20
|
||||||
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { yarn gulp "vscode-win32-x64-user-setup" }
|
||||||
|
exec { yarn gulp "vscode-win32-x64-system-setup" }
|
||||||
|
exec { yarn gulp "vscode-win32-x64-archive" }
|
||||||
|
displayName: Archive & User & System setup
|
||||||
|
|
||||||
|
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||||
|
displayName: 'Sign installers'
|
||||||
|
inputs:
|
||||||
|
ConnectedServiceName: 'Code Signing'
|
||||||
|
FolderPath: '.build'
|
||||||
|
Pattern: '*.exe'
|
||||||
|
signConfigType: inlineSignParams
|
||||||
|
inlineOperation: |
|
||||||
|
[
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolSign",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "OpusName",
|
||||||
|
"parameterValue": "Azure Data Studio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "OpusInfo",
|
||||||
|
"parameterValue": "https://github.com/microsoft/azuredatastudio"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "PageHash",
|
||||||
|
"parameterValue": "/NPH"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "FileDigest",
|
||||||
|
"parameterValue": "/fd sha256"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"parameterName": "TimeStamp",
|
||||||
|
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"keyCode": "CP-230012",
|
||||||
|
"operationSetCode": "SigntoolVerify",
|
||||||
|
"parameters": [
|
||||||
|
{
|
||||||
|
"parameterName": "VerifyAll",
|
||||||
|
"parameterValue": "/all"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"toolName": "signtool.exe",
|
||||||
|
"toolVersion": "6.2.9304.0"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
SessionTimeout: 600
|
||||||
|
MaxConcurrency: 5
|
||||||
|
MaxRetryAttempts: 20
|
||||||
|
condition: and(succeeded(), eq(variables['signed'], true))
|
||||||
|
|
||||||
|
- task: ArchiveFiles@2
|
||||||
|
displayName: 'Archive build scripts source'
|
||||||
|
inputs:
|
||||||
|
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
|
||||||
|
archiveType: tar
|
||||||
|
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: build scripts source'
|
||||||
|
inputs:
|
||||||
|
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
|
||||||
|
ArtifactName: source
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
.\build\azure-pipelines\win32\createDrop.ps1
|
||||||
|
displayName: Create Drop
|
||||||
|
|
||||||
|
- task: PublishBuildArtifacts@1
|
||||||
|
displayName: 'Publish Artifact: drop'
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: 'Publish Test Results test-results.xml'
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: 'test-results.xml'
|
||||||
|
searchFolder: '$(Build.SourcesDirectory)'
|
||||||
|
failTaskOnFailedTests: true
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: PublishTestResults@2
|
||||||
|
displayName: 'Publish Integration and Smoke Test Results'
|
||||||
|
inputs:
|
||||||
|
testResultsFiles: '*.xml'
|
||||||
|
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
|
||||||
|
mergeTestResults: true
|
||||||
|
failTaskOnFailedTests: true
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
|
||||||
|
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||||
|
displayName: 'Component Detection'
|
||||||
|
inputs:
|
||||||
|
failOnAlert: true
|
||||||
106
build/azure-pipelines/win32/sql-product-test-win32.yml
Normal file
106
build/azure-pipelines/win32/sql-product-test-win32.yml
Normal file
@@ -0,0 +1,106 @@
|
|||||||
|
steps:
|
||||||
|
- task: NodeTool@0
|
||||||
|
inputs:
|
||||||
|
versionSpec: "10.15.1"
|
||||||
|
|
||||||
|
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||||
|
inputs:
|
||||||
|
versionSpec: "1.x"
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
$env:CHILD_CONCURRENCY="1"
|
||||||
|
exec { git clean -fxd }
|
||||||
|
displayName: Clean repo
|
||||||
|
|
||||||
|
- task: DownloadPipelineArtifact@2
|
||||||
|
inputs:
|
||||||
|
buildType: 'current'
|
||||||
|
targetPath: '$(Build.SourcesDirectory)\.build'
|
||||||
|
artifactName: drop
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
$env:CHILD_CONCURRENCY="1"
|
||||||
|
exec { yarn --frozen-lockfile }
|
||||||
|
displayName: Install dependencies
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Build.SourcesDirectory)\.build\win32-x64/archive/azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
|
||||||
|
displayName: Unzip artifact
|
||||||
|
|
||||||
|
- task: AzureKeyVault@1
|
||||||
|
displayName: 'Azure Key Vault: SqlToolsSecretStore'
|
||||||
|
inputs:
|
||||||
|
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||||
|
KeyVaultName: SqlToolsSecretStore
|
||||||
|
SecretsFilter: 'ads-integration-test-azure-server,ads-integration-test-azure-server-password,ads-integration-test-azure-server-username,ads-integration-test-bdc-server,ads-integration-test-bdc-server-password,ads-integration-test-bdc-server-username,ads-integration-test-standalone-server,ads-integration-test-standalone-server-password,ads-integration-test-standalone-server-username'
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
|
||||||
|
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
||||||
|
$AppNameShort = $AppProductJson.nameShort
|
||||||
|
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
displayName: Run stable tests
|
||||||
|
env:
|
||||||
|
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
|
||||||
|
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
|
||||||
|
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
|
||||||
|
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
|
||||||
|
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
|
||||||
|
STANDALONE_SQL: $(ads-integration-test-standalone-server)
|
||||||
|
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
|
||||||
|
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
|
||||||
|
AZURE_SQL: $(ads-integration-test-azure-server)
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
|
||||||
|
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
||||||
|
$AppNameShort = $AppProductJson.nameShort
|
||||||
|
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||||
|
displayName: Run release tests
|
||||||
|
env:
|
||||||
|
ADS_TEST_GREP: (.*@REL@|integration test setup)
|
||||||
|
ADS_TEST_INVERT_GREP: 0
|
||||||
|
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
|
||||||
|
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
|
||||||
|
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
|
||||||
|
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
|
||||||
|
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
|
||||||
|
STANDALONE_SQL: $(ads-integration-test-standalone-server)
|
||||||
|
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
|
||||||
|
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
|
||||||
|
AZURE_SQL: $(ads-integration-test-azure-server)
|
||||||
|
|
||||||
|
- powershell: |
|
||||||
|
. build/azure-pipelines/win32/exec.ps1
|
||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
$AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
|
||||||
|
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
|
||||||
|
$AppNameShort = $AppProductJson.nameShort
|
||||||
|
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; .\scripts\sql-test-integration-unstable.bat }
|
||||||
|
continueOnError: true
|
||||||
|
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
|
||||||
|
displayName: Run unstable integration tests
|
||||||
|
env:
|
||||||
|
BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
|
||||||
|
BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
|
||||||
|
BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
|
||||||
|
STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
|
||||||
|
STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
|
||||||
|
STANDALONE_SQL: $(ads-integration-test-standalone-server)
|
||||||
|
AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
|
||||||
|
AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
|
||||||
|
AZURE_SQL: $(ads-integration-test-azure-server)
|
||||||
29
build/azure-pipelines/win32/sql-publish.ps1
Normal file
29
build/azure-pipelines/win32/sql-publish.ps1
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
Param(
|
||||||
|
[string]$sourcesDir,
|
||||||
|
[string]$artifactsDir,
|
||||||
|
[string]$storageKey,
|
||||||
|
[string]$documentDbKey
|
||||||
|
)
|
||||||
|
|
||||||
|
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
|
||||||
|
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
|
||||||
|
|
||||||
|
$ExeName = "AzureDataStudioSetup.exe"
|
||||||
|
$SystemExe = "$artifactsDir\win32-x64\system-setup\$ExeName"
|
||||||
|
$UserExe = "$artifactsDir\win32-x64\user-setup\$ExeName"
|
||||||
|
$UserExeName = "AzureDataStudioUserSetup.exe"
|
||||||
|
$ZipName = "azuredatastudio-win32-x64.zip"
|
||||||
|
$Zip = "$artifactsDir\win32-x64\archive\$ZipName"
|
||||||
|
|
||||||
|
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
|
||||||
|
$Version = $VersionJson.version
|
||||||
|
$Quality = $VersionJson.quality
|
||||||
|
$CommitId = $VersionJson.commit
|
||||||
|
|
||||||
|
$assetPlatform = "win32-x64"
|
||||||
|
|
||||||
|
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-archive" archive $ZipName $Version true $Zip $CommitId
|
||||||
|
|
||||||
|
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform" setup $ExeName $Version true $SystemExe $CommitId
|
||||||
|
|
||||||
|
node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-user" setup $UserExeName $Version true $UserExe $CommitId
|
||||||
@@ -1,7 +1,7 @@
|
|||||||
[
|
[
|
||||||
{
|
{
|
||||||
"name": "Microsoft.sqlservernotebook",
|
"name": "Microsoft.sqlservernotebook",
|
||||||
"version": "0.2.1",
|
"version": "0.3.3",
|
||||||
"repo": "https://github.com/Microsoft/azuredatastudio"
|
"repo": "https://github.com/Microsoft/azuredatastudio"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -1,2 +1,7 @@
|
|||||||
[
|
[
|
||||||
|
{
|
||||||
|
"name": "Microsoft.sqlservernotebook",
|
||||||
|
"version": "0.3.3",
|
||||||
|
"repo": "https://github.com/Microsoft/azuredatastudio"
|
||||||
|
}
|
||||||
]
|
]
|
||||||
|
|||||||
@@ -41,12 +41,7 @@ var editorEntryPoints = [
|
|||||||
];
|
];
|
||||||
|
|
||||||
var editorResources = [
|
var editorResources = [
|
||||||
'out-build/vs/{base,editor}/**/*.{svg,png}',
|
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf'
|
||||||
'!out-build/vs/base/browser/ui/splitview/**/*',
|
|
||||||
'!out-build/vs/base/browser/ui/toolbar/**/*',
|
|
||||||
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
|
|
||||||
'!out-build/vs/workbench/**',
|
|
||||||
'!**/test/**'
|
|
||||||
];
|
];
|
||||||
|
|
||||||
var BUNDLED_FILE_HEADER = [
|
var BUNDLED_FILE_HEADER = [
|
||||||
@@ -62,7 +57,6 @@ var BUNDLED_FILE_HEADER = [
|
|||||||
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||||
|
|
||||||
const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
||||||
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
|
|
||||||
const apiusages = monacoapi.execute().usageContent;
|
const apiusages = monacoapi.execute().usageContent;
|
||||||
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
|
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
|
||||||
standalone.extractEditor({
|
standalone.extractEditor({
|
||||||
@@ -76,25 +70,15 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
|||||||
apiusages,
|
apiusages,
|
||||||
extrausages
|
extrausages
|
||||||
],
|
],
|
||||||
typings: [
|
|
||||||
'typings/lib.ie11_safe_es6.d.ts',
|
|
||||||
'typings/thenable.d.ts',
|
|
||||||
'typings/es6-promise.d.ts',
|
|
||||||
'typings/require-monaco.d.ts',
|
|
||||||
"typings/lib.es2018.promise.d.ts",
|
|
||||||
'vs/monaco.d.ts'
|
|
||||||
],
|
|
||||||
libs: [
|
libs: [
|
||||||
`lib.es5.d.ts`,
|
`lib.es5.d.ts`,
|
||||||
`lib.dom.d.ts`,
|
`lib.dom.d.ts`,
|
||||||
`lib.webworker.importscripts.d.ts`
|
`lib.webworker.importscripts.d.ts`
|
||||||
],
|
],
|
||||||
redirects: {
|
|
||||||
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
|
||||||
},
|
|
||||||
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
|
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
|
||||||
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
|
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
|
||||||
destRoot: path.join(root, 'out-editor-src')
|
destRoot: path.join(root, 'out-editor-src'),
|
||||||
|
redirects: []
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -145,18 +129,70 @@ const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () =>
|
|||||||
});
|
});
|
||||||
|
|
||||||
const compileEditorESMTask = task.define('compile-editor-esm', () => {
|
const compileEditorESMTask = task.define('compile-editor-esm', () => {
|
||||||
|
console.log(`Launching the TS compiler at ${path.join(__dirname, '../out-editor-esm')}...`);
|
||||||
|
let result;
|
||||||
if (process.platform === 'win32') {
|
if (process.platform === 'win32') {
|
||||||
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
|
result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
|
||||||
cwd: path.join(__dirname, '../out-editor-esm')
|
cwd: path.join(__dirname, '../out-editor-esm')
|
||||||
});
|
});
|
||||||
console.log(result.stdout.toString());
|
|
||||||
console.log(result.stderr.toString());
|
|
||||||
} else {
|
} else {
|
||||||
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
|
result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
|
||||||
cwd: path.join(__dirname, '../out-editor-esm')
|
cwd: path.join(__dirname, '../out-editor-esm')
|
||||||
});
|
});
|
||||||
console.log(result.stdout.toString());
|
}
|
||||||
console.log(result.stderr.toString());
|
|
||||||
|
console.log(result.stdout.toString());
|
||||||
|
console.log(result.stderr.toString());
|
||||||
|
|
||||||
|
if (result.status !== 0) {
|
||||||
|
console.log(`The TS Compilation failed, preparing analysis folder...`);
|
||||||
|
const destPath = path.join(__dirname, '../../vscode-monaco-editor-esm-analysis');
|
||||||
|
return util.rimraf(destPath)().then(() => {
|
||||||
|
fs.mkdirSync(destPath);
|
||||||
|
|
||||||
|
// initialize a new repository
|
||||||
|
cp.spawnSync(`git`, [`init`], {
|
||||||
|
cwd: destPath
|
||||||
|
});
|
||||||
|
|
||||||
|
// build a list of files to copy
|
||||||
|
const files = util.rreddir(path.join(__dirname, '../out-editor-esm'));
|
||||||
|
|
||||||
|
// copy files from src
|
||||||
|
for (const file of files) {
|
||||||
|
const srcFilePath = path.join(__dirname, '../src', file);
|
||||||
|
const dstFilePath = path.join(destPath, file);
|
||||||
|
if (fs.existsSync(srcFilePath)) {
|
||||||
|
util.ensureDir(path.dirname(dstFilePath));
|
||||||
|
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
|
||||||
|
fs.writeFileSync(dstFilePath, contents);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// create an initial commit to diff against
|
||||||
|
cp.spawnSync(`git`, [`add`, `.`], {
|
||||||
|
cwd: destPath
|
||||||
|
});
|
||||||
|
|
||||||
|
// create the commit
|
||||||
|
cp.spawnSync(`git`, [`commit`, `-m`, `"original sources"`, `--no-gpg-sign`], {
|
||||||
|
cwd: destPath
|
||||||
|
});
|
||||||
|
|
||||||
|
// copy files from esm
|
||||||
|
for (const file of files) {
|
||||||
|
const srcFilePath = path.join(__dirname, '../out-editor-esm', file);
|
||||||
|
const dstFilePath = path.join(destPath, file);
|
||||||
|
if (fs.existsSync(srcFilePath)) {
|
||||||
|
util.ensureDir(path.dirname(dstFilePath));
|
||||||
|
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
|
||||||
|
fs.writeFileSync(dstFilePath, contents);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`Open in VS Code the folder at '${destPath}' and you can alayze the compilation error`);
|
||||||
|
throw new Error('Standalone Editor compilation failed. If this is the build machine, simply launch `yarn run gulp editor-distro` on your machine to further analyze the compilation problem.');
|
||||||
|
});
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -21,10 +21,15 @@ const nlsDev = require('vscode-nls-dev');
|
|||||||
const root = path.dirname(__dirname);
|
const root = path.dirname(__dirname);
|
||||||
const commit = util.getVersion(root);
|
const commit = util.getVersion(root);
|
||||||
const plumber = require('gulp-plumber');
|
const plumber = require('gulp-plumber');
|
||||||
const _ = require('underscore');
|
|
||||||
const ext = require('./lib/extensions');
|
const ext = require('./lib/extensions');
|
||||||
|
|
||||||
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
const sqlLocalizedExtensions = [
|
||||||
|
'dacpac',
|
||||||
|
'schema-compare'
|
||||||
|
];
|
||||||
|
// {{SQL CARBON EDIT}}
|
||||||
|
|
||||||
const compilations = glob.sync('**/tsconfig.json', {
|
const compilations = glob.sync('**/tsconfig.json', {
|
||||||
cwd: extensionsPath,
|
cwd: extensionsPath,
|
||||||
@@ -37,38 +42,38 @@ const tasks = compilations.map(function (tsconfigFile) {
|
|||||||
const absolutePath = path.join(extensionsPath, tsconfigFile);
|
const absolutePath = path.join(extensionsPath, tsconfigFile);
|
||||||
const relativeDirname = path.dirname(tsconfigFile);
|
const relativeDirname = path.dirname(tsconfigFile);
|
||||||
|
|
||||||
const tsconfig = require(absolutePath);
|
const overrideOptions = {};
|
||||||
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
|
overrideOptions.sourceMap = true;
|
||||||
tsOptions.verbose = false;
|
|
||||||
tsOptions.sourceMap = true;
|
|
||||||
|
|
||||||
const name = relativeDirname.replace(/\//g, '-');
|
const name = relativeDirname.replace(/\//g, '-');
|
||||||
|
|
||||||
const root = path.join('extensions', relativeDirname);
|
const root = path.join('extensions', relativeDirname);
|
||||||
const srcBase = path.join(root, 'src');
|
const srcBase = path.join(root, 'src');
|
||||||
const src = path.join(srcBase, '**');
|
const src = path.join(srcBase, '**');
|
||||||
|
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
|
||||||
|
|
||||||
const out = path.join(root, 'out');
|
const out = path.join(root, 'out');
|
||||||
const baseUrl = getBaseUrl(out);
|
const baseUrl = getBaseUrl(out);
|
||||||
|
|
||||||
let headerId, headerOut;
|
let headerId, headerOut;
|
||||||
let index = relativeDirname.indexOf('/');
|
let index = relativeDirname.indexOf('/');
|
||||||
if (index < 0) {
|
if (index < 0) {
|
||||||
headerId = 'vscode.' + relativeDirname;
|
headerId = 'microsoft.' + relativeDirname; // {{SQL CARBON EDIT}}
|
||||||
headerOut = 'out';
|
headerOut = 'out';
|
||||||
} else {
|
} else {
|
||||||
headerId = 'vscode.' + relativeDirname.substr(0, index);
|
headerId = 'microsoft.' + relativeDirname.substr(0, index); // {{SQL CARBON EDIT}}
|
||||||
headerOut = relativeDirname.substr(index + 1) + '/out';
|
headerOut = relativeDirname.substr(index + 1) + '/out';
|
||||||
}
|
}
|
||||||
|
|
||||||
function createPipeline(build, emitError) {
|
function createPipeline(build, emitError) {
|
||||||
const reporter = createReporter();
|
const reporter = createReporter();
|
||||||
|
|
||||||
tsOptions.inlineSources = !!build;
|
overrideOptions.inlineSources = Boolean(build);
|
||||||
tsOptions.base = path.dirname(absolutePath);
|
overrideOptions.base = path.dirname(absolutePath);
|
||||||
|
|
||||||
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
|
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString()));
|
||||||
|
|
||||||
return function () {
|
const pipeline = function () {
|
||||||
const input = es.through();
|
const input = es.through();
|
||||||
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
|
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
|
||||||
const output = input
|
const output = input
|
||||||
@@ -98,15 +103,20 @@ const tasks = compilations.map(function (tsconfigFile) {
|
|||||||
|
|
||||||
return es.duplex(input, output);
|
return es.duplex(input, output);
|
||||||
};
|
};
|
||||||
}
|
|
||||||
|
|
||||||
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
|
// add src-stream for project files
|
||||||
|
pipeline.tsProjectSrc = () => {
|
||||||
|
return compilation.src(srcOpts);
|
||||||
|
};
|
||||||
|
return pipeline;
|
||||||
|
}
|
||||||
|
|
||||||
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
|
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
|
||||||
|
|
||||||
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
|
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
|
||||||
const pipeline = createPipeline(false, true);
|
const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}}
|
||||||
const input = gulp.src(src, srcOpts);
|
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
|
||||||
|
const input = es.merge(nonts, pipeline.tsProjectSrc());
|
||||||
|
|
||||||
return input
|
return input
|
||||||
.pipe(pipeline())
|
.pipe(pipeline())
|
||||||
@@ -115,8 +125,9 @@ const tasks = compilations.map(function (tsconfigFile) {
|
|||||||
|
|
||||||
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
|
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
|
||||||
const pipeline = createPipeline(false);
|
const pipeline = createPipeline(false);
|
||||||
const input = gulp.src(src, srcOpts);
|
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
|
||||||
const watchInput = watcher(src, srcOpts);
|
const input = es.merge(nonts, pipeline.tsProjectSrc());
|
||||||
|
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });
|
||||||
|
|
||||||
return watchInput
|
return watchInput
|
||||||
.pipe(util.incremental(pipeline, input))
|
.pipe(util.incremental(pipeline, input))
|
||||||
@@ -125,7 +136,8 @@ const tasks = compilations.map(function (tsconfigFile) {
|
|||||||
|
|
||||||
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
|
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
|
||||||
const pipeline = createPipeline(true, true);
|
const pipeline = createPipeline(true, true);
|
||||||
const input = gulp.src(src, srcOpts);
|
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
|
||||||
|
const input = es.merge(nonts, pipeline.tsProjectSrc());
|
||||||
|
|
||||||
return input
|
return input
|
||||||
.pipe(pipeline())
|
.pipe(pipeline())
|
||||||
@@ -156,8 +168,8 @@ const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimr
|
|||||||
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
|
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
|
||||||
cleanExtensionsBuildTask,
|
cleanExtensionsBuildTask,
|
||||||
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
|
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
|
||||||
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build'))),
|
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build')))
|
||||||
));
|
));
|
||||||
|
|
||||||
gulp.task(compileExtensionsBuildTask);
|
gulp.task(compileExtensionsBuildTask);
|
||||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
||||||
|
|||||||
@@ -17,6 +17,7 @@ const vfs = require('vinyl-fs');
|
|||||||
const path = require('path');
|
const path = require('path');
|
||||||
const fs = require('fs');
|
const fs = require('fs');
|
||||||
const pall = require('p-all');
|
const pall = require('p-all');
|
||||||
|
const task = require('./lib/task');
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Hygiene works by creating cascading subsets of all our files and
|
* Hygiene works by creating cascading subsets of all our files and
|
||||||
@@ -55,8 +56,10 @@ const indentationFilter = [
|
|||||||
'!src/vs/base/node/terminateProcess.sh',
|
'!src/vs/base/node/terminateProcess.sh',
|
||||||
'!src/vs/base/node/cpuUsage.sh',
|
'!src/vs/base/node/cpuUsage.sh',
|
||||||
'!test/assert.js',
|
'!test/assert.js',
|
||||||
|
'!build/testSetup.js',
|
||||||
|
|
||||||
// except specific folders
|
// except specific folders
|
||||||
|
'!test/automation/out/**',
|
||||||
'!test/smoke/out/**',
|
'!test/smoke/out/**',
|
||||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||||
@@ -65,11 +68,12 @@ const indentationFilter = [
|
|||||||
|
|
||||||
// except multiple specific files
|
// except multiple specific files
|
||||||
'!**/package.json',
|
'!**/package.json',
|
||||||
|
'!**/package-lock.json', // {{SQL CARBON EDIT}}
|
||||||
'!**/yarn.lock',
|
'!**/yarn.lock',
|
||||||
'!**/yarn-error.log',
|
'!**/yarn-error.log',
|
||||||
|
|
||||||
// except multiple specific folders
|
// except multiple specific folders
|
||||||
'!**/octicons/**',
|
'!**/codicon/**',
|
||||||
'!**/fixtures/**',
|
'!**/fixtures/**',
|
||||||
'!**/lib/**',
|
'!**/lib/**',
|
||||||
'!extensions/**/out/**',
|
'!extensions/**/out/**',
|
||||||
@@ -99,7 +103,8 @@ const indentationFilter = [
|
|||||||
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
||||||
'!extensions/resource-deployment/notebooks/**',
|
'!extensions/resource-deployment/notebooks/**',
|
||||||
'!extensions/mssql/notebooks/**',
|
'!extensions/mssql/notebooks/**',
|
||||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
|
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
|
||||||
|
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts'
|
||||||
];
|
];
|
||||||
|
|
||||||
const copyrightFilter = [
|
const copyrightFilter = [
|
||||||
@@ -120,6 +125,7 @@ const copyrightFilter = [
|
|||||||
'!**/*.opts',
|
'!**/*.opts',
|
||||||
'!**/*.disabled',
|
'!**/*.disabled',
|
||||||
'!**/*.code-workspace',
|
'!**/*.code-workspace',
|
||||||
|
'!**/*.js.map',
|
||||||
'!**/promise-polyfill/polyfill.js',
|
'!**/promise-polyfill/polyfill.js',
|
||||||
'!build/**/*.init',
|
'!build/**/*.init',
|
||||||
'!resources/linux/snap/snapcraft.yaml',
|
'!resources/linux/snap/snapcraft.yaml',
|
||||||
@@ -130,34 +136,36 @@ const copyrightFilter = [
|
|||||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||||
'!extensions/*/server/bin/*',
|
'!extensions/*/server/bin/*',
|
||||||
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||||
|
'!scripts/code-web.js',
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
'!extensions/notebook/src/intellisense/text.ts',
|
'!extensions/notebook/src/intellisense/text.ts',
|
||||||
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
|
'!extensions/mssql/src/hdfs/webhdfs.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
|
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
|
||||||
'!src/sql/workbench/parts/notebook/common/models/url.ts',
|
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts',
|
'!src/sql/workbench/contrib/notebook/browser/models/renderMimeInterfaces.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/models/outputProcessor.ts',
|
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/models/mimemodel.ts',
|
'!src/sql/workbench/contrib/notebook/browser/models/mimemodel.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css',
|
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
|
||||||
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
|
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
|
||||||
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
|
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
|
||||||
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
|
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
|
||||||
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
|
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
|
||||||
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
|
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
|
||||||
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
|
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/outputs/sanitizer.ts',
|
'!src/sql/workbench/contrib/notebook/browser/outputs/sanitizer.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/outputs/renderers.ts',
|
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/outputs/registry.ts',
|
'!src/sql/workbench/contrib/notebook/browser/outputs/registry.ts',
|
||||||
'!src/sql/workbench/parts/notebook/browser/outputs/factories.ts',
|
'!src/sql/workbench/contrib/notebook/browser/outputs/factories.ts',
|
||||||
'!src/sql/workbench/parts/notebook/common/models/nbformat.ts',
|
'!src/sql/workbench/contrib/notebook/common/models/nbformat.ts',
|
||||||
'!extensions/markdown-language-features/media/tomorrow.css',
|
'!extensions/markdown-language-features/media/tomorrow.css',
|
||||||
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
|
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
|
||||||
'!src/sql/workbench/parts/notebook/electron-browser/cellViews/media/highlight.css',
|
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
|
||||||
'!extensions/mssql/sqltoolsservice/**',
|
'!extensions/mssql/sqltoolsservice/**',
|
||||||
'!extensions/import/flatfileimportservice/**',
|
'!extensions/import/flatfileimportservice/**',
|
||||||
'!extensions/notebook/src/prompts/**',
|
'!extensions/notebook/src/prompts/**',
|
||||||
'!extensions/mssql/src/prompts/**',
|
'!extensions/mssql/src/prompts/**',
|
||||||
'!extensions/notebook/resources/jupyter_config/**',
|
'!extensions/notebook/resources/jupyter_config/**',
|
||||||
|
'!extensions/query-history/images/**',
|
||||||
'!**/*.gif',
|
'!**/*.gif',
|
||||||
'!**/*.xlf',
|
'!**/*.xlf',
|
||||||
'!**/*.dacpac',
|
'!**/*.dacpac',
|
||||||
@@ -186,25 +194,44 @@ const tslintBaseFilter = [
|
|||||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||||
'!extensions/**/*.test.ts',
|
'!extensions/**/*.test.ts',
|
||||||
'!extensions/html-language-features/server/lib/jquery.d.ts',
|
'!extensions/html-language-features/server/lib/jquery.d.ts',
|
||||||
// {{SQL CARBON EDIT}}
|
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
|
||||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
|
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}},
|
||||||
|
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts' // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and its not ours
|
||||||
];
|
];
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const useStrictFilter = [
|
|
||||||
'src/**'
|
|
||||||
];
|
|
||||||
|
|
||||||
const sqlFilter = [
|
const sqlFilter = [
|
||||||
'src/sql/**'
|
'src/sql/**',
|
||||||
|
'extensions/**',
|
||||||
|
// Ignore VS Code extensions
|
||||||
|
'!extensions/bat/**',
|
||||||
|
'!extensions/configuration-editing/**',
|
||||||
|
'!extensions/docker/**',
|
||||||
|
'!extensions/extension-editing/**',
|
||||||
|
'!extensions/git/**',
|
||||||
|
'!extensions/git-ui/**',
|
||||||
|
'!extensions/image-preview/**',
|
||||||
|
'!extensions/insights-default/**',
|
||||||
|
'!extensions/json/**',
|
||||||
|
'!extensions/json-language-features/**',
|
||||||
|
'!extensions/markdown-basics/**',
|
||||||
|
'!extensions/markdown-language-features/**',
|
||||||
|
'!extensions/merge-conflict/**',
|
||||||
|
'!extensions/powershell/**',
|
||||||
|
'!extensions/python/**',
|
||||||
|
'!extensions/r/**',
|
||||||
|
'!extensions/theme-*/**',
|
||||||
|
'!extensions/vscode-*/**',
|
||||||
|
'!extensions/xml/**',
|
||||||
|
'!extensions/xml-language-features/**',
|
||||||
|
'!extensions/yarml/**',
|
||||||
];
|
];
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
|
|
||||||
const tslintCoreFilter = [
|
const tslintCoreFilter = [
|
||||||
'src/**/*.ts',
|
'src/**/*.ts',
|
||||||
'test/**/*.ts',
|
'test/**/*.ts',
|
||||||
'!extensions/**/*.ts',
|
'!extensions/**/*.ts',
|
||||||
|
'!test/automation/**',
|
||||||
'!test/smoke/**',
|
'!test/smoke/**',
|
||||||
...tslintBaseFilter
|
...tslintBaseFilter
|
||||||
];
|
];
|
||||||
@@ -213,6 +240,7 @@ const tslintExtensionsFilter = [
|
|||||||
'extensions/**/*.ts',
|
'extensions/**/*.ts',
|
||||||
'!src/**/*.ts',
|
'!src/**/*.ts',
|
||||||
'!test/**/*.ts',
|
'!test/**/*.ts',
|
||||||
|
'test/automation/**/*.ts',
|
||||||
...tslintBaseFilter
|
...tslintBaseFilter
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -220,9 +248,16 @@ const tslintHygieneFilter = [
|
|||||||
'src/**/*.ts',
|
'src/**/*.ts',
|
||||||
'test/**/*.ts',
|
'test/**/*.ts',
|
||||||
'extensions/**/*.ts',
|
'extensions/**/*.ts',
|
||||||
|
'!src/vs/workbench/contrib/extensions/browser/extensionTipsService.ts', // {{SQL CARBON EDIT}} known formatting issue do to commenting out code
|
||||||
...tslintBaseFilter
|
...tslintBaseFilter
|
||||||
];
|
];
|
||||||
|
|
||||||
|
const fileLengthFilter = filter([
|
||||||
|
'**',
|
||||||
|
'!extensions/import/*.docx',
|
||||||
|
'!extensions/admin-tool-ext-win/license/**'
|
||||||
|
], {restore: true});
|
||||||
|
|
||||||
const copyrightHeaderLines = [
|
const copyrightHeaderLines = [
|
||||||
'/*---------------------------------------------------------------------------------------------',
|
'/*---------------------------------------------------------------------------------------------',
|
||||||
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||||
@@ -255,6 +290,33 @@ gulp.task('tslint', () => {
|
|||||||
]).pipe(es.through());
|
]).pipe(es.through());
|
||||||
});
|
});
|
||||||
|
|
||||||
|
function checkPackageJSON(actualPath) {
|
||||||
|
const actual = require(path.join(__dirname, '..', actualPath));
|
||||||
|
const rootPackageJSON = require('../package.json');
|
||||||
|
|
||||||
|
for (let depName in actual.dependencies) {
|
||||||
|
const depVersion = actual.dependencies[depName];
|
||||||
|
const rootDepVersion = rootPackageJSON.dependencies[depName];
|
||||||
|
if (!rootDepVersion) {
|
||||||
|
// missing in root is allowed
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (depVersion !== rootDepVersion) {
|
||||||
|
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const checkPackageJSONTask = task.define('check-package-json', () => {
|
||||||
|
return gulp.src('package.json')
|
||||||
|
.pipe(es.through(function() {
|
||||||
|
checkPackageJSON.call(this, 'remote/package.json');
|
||||||
|
checkPackageJSON.call(this, 'remote/web/package.json');
|
||||||
|
}));
|
||||||
|
});
|
||||||
|
gulp.task(checkPackageJSONTask);
|
||||||
|
|
||||||
|
|
||||||
function hygiene(some) {
|
function hygiene(some) {
|
||||||
let errorCount = 0;
|
let errorCount = 0;
|
||||||
|
|
||||||
@@ -304,23 +366,6 @@ function hygiene(some) {
|
|||||||
this.emit('data', file);
|
this.emit('data', file);
|
||||||
});
|
});
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
|
|
||||||
const useStrict = es.through(function (file) {
|
|
||||||
const lines = file.__lines;
|
|
||||||
// Only take the first 10 lines to reduce false positives- the compiler will throw an error if it's not the first non-comment line in a file
|
|
||||||
// (10 is used to account for copyright and extraneous newlines)
|
|
||||||
lines.slice(0, 10).forEach((line, i) => {
|
|
||||||
if (/\s*'use\s*strict\s*'/.test(line)) {
|
|
||||||
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
|
|
||||||
errorCount++;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
this.emit('data', file);
|
|
||||||
});
|
|
||||||
// {{SQL CARBON EDIT}} END
|
|
||||||
|
|
||||||
const formatting = es.map(function (file, cb) {
|
const formatting = es.map(function (file, cb) {
|
||||||
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
||||||
verify: false,
|
verify: false,
|
||||||
@@ -351,6 +396,23 @@ function hygiene(some) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const filelength = es.through(function (file) {
|
||||||
|
|
||||||
|
const fileName = path.basename(file.relative);
|
||||||
|
const fileDir = path.dirname(file.relative);
|
||||||
|
//check the filename is < 50 characters (basename gets the filename with extension).
|
||||||
|
if (fileName.length > 50) {
|
||||||
|
console.error(`File name '${fileName}' under ${fileDir} is too long. Rename file to have less than 50 characters.`);
|
||||||
|
errorCount++;
|
||||||
|
}
|
||||||
|
if (file.relative.length > 150) {
|
||||||
|
console.error(`File path ${file.relative} exceeds acceptable file-length. Rename the path to have less than 150 characters.`);
|
||||||
|
errorCount++;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.emit('data', file);
|
||||||
|
});
|
||||||
|
|
||||||
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
|
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
|
||||||
const tslintOptions = { fix: false, formatter: 'json' };
|
const tslintOptions = { fix: false, formatter: 'json' };
|
||||||
const tsLinter = new tslint.Linter(tslintOptions);
|
const tsLinter = new tslint.Linter(tslintOptions);
|
||||||
@@ -364,25 +426,32 @@ function hygiene(some) {
|
|||||||
let input;
|
let input;
|
||||||
|
|
||||||
if (Array.isArray(some) || typeof some === 'string' || !some) {
|
if (Array.isArray(some) || typeof some === 'string' || !some) {
|
||||||
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
|
const options = { base: '.', follow: true, allowEmpty: true };
|
||||||
|
if (some) {
|
||||||
|
input = vfs.src(some, options).pipe(filter(all)); // split this up to not unnecessarily filter all a second time
|
||||||
|
} else {
|
||||||
|
input = vfs.src(all, options);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
input = some;
|
input = some;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// {{SQL CARBON EDIT}} Linting for SQL
|
||||||
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
|
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
|
||||||
const tslintSqlOptions = { fix: false, formatter: 'json' };
|
const tslintSqlOptions = { fix: false, formatter: 'json' };
|
||||||
const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
|
const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
|
||||||
|
|
||||||
const sqlTsl = es.through(function (file) {
|
const sqlTsl = es.through(function (file) { //TODO restore
|
||||||
const contents = file.contents.toString('utf8');
|
const contents = file.contents.toString('utf8');
|
||||||
sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results);
|
sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results);
|
||||||
|
|
||||||
this.emit('data', file);
|
|
||||||
});
|
});
|
||||||
|
|
||||||
const productJsonFilter = filter('product.json', { restore: true });
|
const productJsonFilter = filter('product.json', { restore: true });
|
||||||
|
|
||||||
const result = input
|
const result = input
|
||||||
|
.pipe(fileLengthFilter)
|
||||||
|
.pipe(filelength)
|
||||||
|
.pipe(fileLengthFilter.restore)
|
||||||
.pipe(filter(f => !f.stat.isDirectory()))
|
.pipe(filter(f => !f.stat.isDirectory()))
|
||||||
.pipe(productJsonFilter)
|
.pipe(productJsonFilter)
|
||||||
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
|
.pipe(process.env['BUILD_SOURCEVERSION'] ? es.through() : productJson)
|
||||||
@@ -392,15 +461,16 @@ function hygiene(some) {
|
|||||||
.pipe(filter(copyrightFilter))
|
.pipe(filter(copyrightFilter))
|
||||||
.pipe(copyrights);
|
.pipe(copyrights);
|
||||||
|
|
||||||
const typescript = result
|
let typescript = result
|
||||||
.pipe(filter(tslintHygieneFilter))
|
.pipe(filter(tslintHygieneFilter))
|
||||||
.pipe(formatting)
|
.pipe(formatting);
|
||||||
.pipe(tsl)
|
|
||||||
// {{SQL CARBON EDIT}}
|
if (!process.argv.some(arg => arg === '--skip-tslint')) {
|
||||||
.pipe(filter(useStrictFilter))
|
typescript = typescript.pipe(tsl);
|
||||||
.pipe(useStrict)
|
typescript = typescript
|
||||||
.pipe(filter(sqlFilter))
|
.pipe(filter(sqlFilter)) // {{SQL CARBON EDIT}}
|
||||||
.pipe(sqlTsl);
|
.pipe(sqlTsl);
|
||||||
|
}
|
||||||
|
|
||||||
const javascript = result
|
const javascript = result
|
||||||
.pipe(filter(eslintFilter))
|
.pipe(filter(eslintFilter))
|
||||||
@@ -486,7 +556,7 @@ function createGitIndexVinyls(paths) {
|
|||||||
.then(r => r.filter(p => !!p));
|
.then(r => r.filter(p => !!p));
|
||||||
}
|
}
|
||||||
|
|
||||||
gulp.task('hygiene', () => hygiene());
|
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene()));
|
||||||
|
|
||||||
// this allows us to run hygiene as a git pre-commit hook
|
// this allows us to run hygiene as a git pre-commit hook
|
||||||
if (require.main === module) {
|
if (require.main === module) {
|
||||||
|
|||||||
@@ -1,79 +0,0 @@
|
|||||||
/*---------------------------------------------------------------------------------------------
|
|
||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
|
||||||
*--------------------------------------------------------------------------------------------*/
|
|
||||||
|
|
||||||
'use strict';
|
|
||||||
|
|
||||||
const gulp = require('gulp');
|
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const jeditor = require('gulp-json-editor');
|
|
||||||
const product = require('../product.json');
|
|
||||||
|
|
||||||
gulp.task('mixin', function () {
|
|
||||||
// {{SQL CARBON EDIT}}
|
|
||||||
const updateUrl = process.env['SQLOPS_UPDATEURL'];
|
|
||||||
if (!updateUrl) {
|
|
||||||
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const quality = process.env['VSCODE_QUALITY'];
|
|
||||||
|
|
||||||
if (!quality) {
|
|
||||||
console.log('Missing VSCODE_QUALITY, skipping mixin');
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
|
|
||||||
let newValues = {
|
|
||||||
"nameShort": product.nameShort,
|
|
||||||
"nameLong": product.nameLong,
|
|
||||||
"applicationName": product.applicationName,
|
|
||||||
"dataFolderName": product.dataFolderName,
|
|
||||||
"win32MutexName": product.win32MutexName,
|
|
||||||
"win32DirName": product.win32DirName,
|
|
||||||
"win32NameVersion": product.win32NameVersion,
|
|
||||||
"win32RegValueName": product.win32RegValueName,
|
|
||||||
"win32AppId": product.win32AppId,
|
|
||||||
"win32x64AppId": product.win32x64AppId,
|
|
||||||
"win32UserAppId": product.win32UserAppId,
|
|
||||||
"win32x64UserAppId": product.win32x64UserAppId,
|
|
||||||
"win32AppUserModelId": product.win32AppUserModelId,
|
|
||||||
"win32ShellNameShort": product.win32ShellNameShort,
|
|
||||||
"darwinBundleIdentifier": product.darwinBundleIdentifier,
|
|
||||||
"updateUrl": updateUrl,
|
|
||||||
"quality": quality,
|
|
||||||
"extensionsGallery": {
|
|
||||||
"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if (quality === 'insider') {
|
|
||||||
let dashSuffix = '-insiders';
|
|
||||||
let dotSuffix = '.insiders';
|
|
||||||
let displaySuffix = ' - Insiders';
|
|
||||||
|
|
||||||
newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
|
|
||||||
newValues.nameShort += dashSuffix;
|
|
||||||
newValues.nameLong += displaySuffix;
|
|
||||||
newValues.applicationName += dashSuffix;
|
|
||||||
newValues.dataFolderName += dashSuffix;
|
|
||||||
newValues.win32MutexName += dashSuffix;
|
|
||||||
newValues.win32DirName += displaySuffix;
|
|
||||||
newValues.win32NameVersion += displaySuffix;
|
|
||||||
newValues.win32RegValueName += dashSuffix;
|
|
||||||
newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
|
|
||||||
newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
|
|
||||||
newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
|
|
||||||
newValues.win32x64UserAppId += dashSuffix;
|
|
||||||
newValues.win32AppUserModelId += dotSuffix;
|
|
||||||
newValues.win32ShellNameShort += displaySuffix;
|
|
||||||
newValues.darwinBundleIdentifier += dotSuffix;
|
|
||||||
}
|
|
||||||
|
|
||||||
return gulp.src('./product.json')
|
|
||||||
.pipe(jeditor(newValues))
|
|
||||||
.pipe(gulp.dest('.'));
|
|
||||||
});
|
|
||||||
@@ -4,7 +4,6 @@
|
|||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
const gulp = require('gulp');
|
const gulp = require('gulp');
|
||||||
const util = require('./lib/util');
|
const util = require('./lib/util');
|
||||||
const tsfmt = require('typescript-formatter');
|
const tsfmt = require('typescript-formatter');
|
||||||
@@ -12,8 +11,13 @@ const es = require('event-stream');
|
|||||||
const filter = require('gulp-filter');
|
const filter = require('gulp-filter');
|
||||||
const del = require('del');
|
const del = require('del');
|
||||||
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
|
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
|
||||||
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
|
const platform = require('service-downloader/out/platform').PlatformInformation;
|
||||||
const path = require('path');
|
const path = require('path');
|
||||||
|
const ext = require('./lib/extensions');
|
||||||
|
const task = require('./lib/task');
|
||||||
|
const glob = require('glob');
|
||||||
|
const vsce = require('vsce');
|
||||||
|
const mkdirp = require('mkdirp');
|
||||||
|
|
||||||
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
|
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
|
||||||
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
|
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
|
||||||
@@ -22,73 +26,79 @@ gulp.task('fmt', () => formatStagedFiles());
|
|||||||
const formatFiles = (some) => {
|
const formatFiles = (some) => {
|
||||||
const formatting = es.map(function (file, cb) {
|
const formatting = es.map(function (file, cb) {
|
||||||
|
|
||||||
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
||||||
replace: true,
|
replace: true,
|
||||||
tsfmt: true,
|
tsfmt: true,
|
||||||
tslint: true,
|
tslint: true,
|
||||||
tsconfig: true
|
tsconfig: true
|
||||||
// verbose: true
|
// verbose: true
|
||||||
}).then(result => {
|
}).then(result => {
|
||||||
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
|
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
|
||||||
if (result.error) {
|
if (result.error) {
|
||||||
console.error(result.message);
|
console.error(result.message);
|
||||||
}
|
}
|
||||||
cb(null, file);
|
cb(null, file);
|
||||||
|
|
||||||
}, err => {
|
}, err => {
|
||||||
cb(err);
|
cb(err);
|
||||||
});
|
|
||||||
});
|
});
|
||||||
return gulp.src(some, { base: '.' })
|
});
|
||||||
.pipe(filter(f => !f.stat.isDirectory()))
|
return gulp.src(some, {
|
||||||
.pipe(formatting);
|
base: '.'
|
||||||
|
})
|
||||||
|
.pipe(filter(f => !f.stat.isDirectory()))
|
||||||
|
.pipe(formatting);
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
const formatStagedFiles = () => {
|
const formatStagedFiles = () => {
|
||||||
const cp = require('child_process');
|
const cp = require('child_process');
|
||||||
cp.exec('git diff --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
|
cp.exec('git diff --name-only', {
|
||||||
if (err) {
|
maxBuffer: 2000 * 1024
|
||||||
console.error();
|
}, (err, out) => {
|
||||||
console.error(err);
|
if (err) {
|
||||||
process.exit(1);
|
console.error();
|
||||||
}
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
const some = out
|
const some = out
|
||||||
.split(/\r?\n/)
|
.split(/\r?\n/)
|
||||||
.filter(l => !!l)
|
.filter(l => !!l)
|
||||||
.filter(l => l.match(/.*.ts$/i));
|
.filter(l => l.match(/.*.ts$/i));
|
||||||
|
|
||||||
formatFiles(some).on('error', err => {
|
formatFiles(some).on('error', err => {
|
||||||
console.error();
|
console.error();
|
||||||
console.error(err);
|
console.error(err);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
});
|
||||||
|
|
||||||
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
|
cp.exec('git diff --cached --name-only', {
|
||||||
if (err) {
|
maxBuffer: 2000 * 1024
|
||||||
console.error();
|
}, (err, out) => {
|
||||||
console.error(err);
|
if (err) {
|
||||||
process.exit(1);
|
console.error();
|
||||||
}
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
const some = out
|
const some = out
|
||||||
.split(/\r?\n/)
|
.split(/\r?\n/)
|
||||||
.filter(l => !!l)
|
.filter(l => !!l)
|
||||||
.filter(l => l.match(/.*.ts$/i));
|
.filter(l => l.match(/.*.ts$/i));
|
||||||
|
|
||||||
formatFiles(some).on('error', err => {
|
formatFiles(some).on('error', err => {
|
||||||
console.error();
|
console.error();
|
||||||
console.error(err);
|
console.error(err);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
function installService() {
|
function installService() {
|
||||||
let config = require('../extensions/mssql/src/config.json');
|
let config = require('../extensions/mssql/config.json');
|
||||||
return platformInfo.getCurrent().then(p => {
|
return platform.getCurrent().then(p => {
|
||||||
let runtime = p.runtimeId;
|
let runtime = p.runtimeId;
|
||||||
// fix path since it won't be correct
|
// fix path since it won't be correct
|
||||||
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
|
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
|
||||||
@@ -108,25 +118,50 @@ gulp.task('install-sqltoolsservice', () => {
|
|||||||
return installService();
|
return installService();
|
||||||
});
|
});
|
||||||
|
|
||||||
function installSsmsMin() {
|
|
||||||
const config = require('../extensions/admin-tool-ext-win/src/config.json');
|
|
||||||
return platformInfo.getCurrent().then(p => {
|
|
||||||
const runtime = p.runtimeId;
|
|
||||||
// fix path since it won't be correct
|
|
||||||
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
|
|
||||||
var installer = new serviceDownloader(config);
|
|
||||||
const serviceInstallFolder = installer.getInstallDirectory(runtime);
|
|
||||||
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
|
|
||||||
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
|
|
||||||
return del(serviceCleanupFolder + '/*').then(() => {
|
|
||||||
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
|
|
||||||
return installer.installService(runtime);
|
|
||||||
}, delError => {
|
|
||||||
console.log('failed to delete the install folder error: ' + delError);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
gulp.task('install-ssmsmin', () => {
|
gulp.task('install-ssmsmin', () => {
|
||||||
return installSsmsMin();
|
const config = require('../extensions/admin-tool-ext-win/config.json');
|
||||||
|
const runtime = 'Windows_64'; // admin-tool-ext is a windows only extension, and we only ship a 64 bit version, so locking the binaries as such
|
||||||
|
// fix path since it won't be correct
|
||||||
|
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
|
||||||
|
var installer = new serviceDownloader(config);
|
||||||
|
const serviceInstallFolder = installer.getInstallDirectory(runtime);
|
||||||
|
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
|
||||||
|
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
|
||||||
|
return del(serviceCleanupFolder + '/*').then(() => {
|
||||||
|
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
|
||||||
|
return installer.installService(runtime);
|
||||||
|
}, delError => {
|
||||||
|
console.log('failed to delete the install folder error: ' + delError);
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
const root = path.dirname(__dirname);
|
||||||
|
|
||||||
|
gulp.task('package-external-extensions', task.series(
|
||||||
|
task.define('bundle-external-extensions-build', () => ext.packageExternalExtensionsStream().pipe(gulp.dest('.build/external'))),
|
||||||
|
task.define('create-external-extension-vsix-build', () => {
|
||||||
|
const vsixes = glob.sync('.build/external/extensions/*/package.json').map(manifestPath => {
|
||||||
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
|
const extensionName = path.basename(extensionPath);
|
||||||
|
return { name: extensionName, path: extensionPath };
|
||||||
|
}).map(element => {
|
||||||
|
const pkgJson = require(path.join(element.path, 'package.json'));
|
||||||
|
const vsixDirectory = path.join(root, '.build', 'extensions');
|
||||||
|
mkdirp.sync(vsixDirectory);
|
||||||
|
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
||||||
|
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||||
|
return vsce.createVSIX({
|
||||||
|
cwd: element.path,
|
||||||
|
packagePath: packagePath,
|
||||||
|
useYarn: true
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
return Promise.all(vsixes);
|
||||||
|
})
|
||||||
|
));
|
||||||
|
|
||||||
|
gulp.task('package-rebuild-extensions', task.series(
|
||||||
|
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
|
||||||
|
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
|
||||||
|
));
|
||||||
|
|||||||
@@ -29,9 +29,8 @@ const packageJson = require('../package.json');
|
|||||||
const product = require('../product.json');
|
const product = require('../product.json');
|
||||||
const crypto = require('crypto');
|
const crypto = require('crypto');
|
||||||
const i18n = require('./lib/i18n');
|
const i18n = require('./lib/i18n');
|
||||||
const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
|
|
||||||
const deps = require('./dependencies');
|
const deps = require('./dependencies');
|
||||||
const getElectronVersion = require('./lib/electron').getElectronVersion;
|
const { config } = require('./lib/electron');
|
||||||
const createAsar = require('./lib/asar').createAsar;
|
const createAsar = require('./lib/asar').createAsar;
|
||||||
const { compileBuildTask } = require('./gulpfile.compile');
|
const { compileBuildTask } = require('./gulpfile.compile');
|
||||||
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
|
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
|
||||||
@@ -60,8 +59,7 @@ const nodeModules = [
|
|||||||
const vscodeEntryPoints = _.flatten([
|
const vscodeEntryPoints = _.flatten([
|
||||||
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
|
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
|
||||||
buildfile.base,
|
buildfile.base,
|
||||||
buildfile.serviceWorker,
|
buildfile.workbenchDesktop,
|
||||||
buildfile.workbench,
|
|
||||||
buildfile.code
|
buildfile.code
|
||||||
]);
|
]);
|
||||||
|
|
||||||
@@ -79,7 +77,7 @@ const vscodeResources = [
|
|||||||
'out-build/vs/base/common/performance.js',
|
'out-build/vs/base/common/performance.js',
|
||||||
'out-build/vs/base/node/languagePacks.js',
|
'out-build/vs/base/node/languagePacks.js',
|
||||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
|
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
|
||||||
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
|
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
|
||||||
'out-build/vs/workbench/browser/media/*-theme.css',
|
'out-build/vs/workbench/browser/media/*-theme.css',
|
||||||
'out-build/vs/workbench/contrib/debug/**/*.json',
|
'out-build/vs/workbench/contrib/debug/**/*.json',
|
||||||
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
|
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
|
||||||
@@ -87,15 +85,13 @@ const vscodeResources = [
|
|||||||
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
|
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
|
||||||
'out-build/vs/**/markdown.css',
|
'out-build/vs/**/markdown.css',
|
||||||
'out-build/vs/workbench/contrib/tasks/**/*.json',
|
'out-build/vs/workbench/contrib/tasks/**/*.json',
|
||||||
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
|
|
||||||
'out-build/vs/platform/files/**/*.exe',
|
'out-build/vs/platform/files/**/*.exe',
|
||||||
'out-build/vs/platform/files/**/*.md',
|
'out-build/vs/platform/files/**/*.md',
|
||||||
'out-build/vs/code/electron-browser/workbench/**',
|
'out-build/vs/code/electron-browser/workbench/**',
|
||||||
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
||||||
'out-build/vs/code/electron-browser/issue/issueReporter.js',
|
'out-build/vs/code/electron-browser/issue/issueReporter.js',
|
||||||
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
|
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
|
||||||
// {{SQL CARBON EDIT}}
|
'out-build/sql/workbench/electron-browser/splashscreen/*', // {{SQL CARBON EDIT}} STart
|
||||||
'out-build/sql/workbench/electron-browser/splashscreen/*',
|
|
||||||
'out-build/sql/**/*.{svg,png,cur,html}',
|
'out-build/sql/**/*.{svg,png,cur,html}',
|
||||||
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
|
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
|
||||||
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
|
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
|
||||||
@@ -113,7 +109,8 @@ const vscodeResources = [
|
|||||||
'out-build/sql/media/objectTypes/*.svg',
|
'out-build/sql/media/objectTypes/*.svg',
|
||||||
'out-build/sql/media/icons/*.svg',
|
'out-build/sql/media/icons/*.svg',
|
||||||
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
|
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
|
||||||
'out-build/sql/setup.js',
|
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
|
||||||
|
'out-build/vs/platform/auth/common/auth.css',
|
||||||
'!**/test/**'
|
'!**/test/**'
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -125,6 +122,7 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
|
|||||||
resources: vscodeResources,
|
resources: vscodeResources,
|
||||||
loaderConfig: common.loaderConfig(nodeModules),
|
loaderConfig: common.loaderConfig(nodeModules),
|
||||||
out: 'out-vscode',
|
out: 'out-vscode',
|
||||||
|
inlineAmdImages: true,
|
||||||
bundleInfo: undefined
|
bundleInfo: undefined
|
||||||
})
|
})
|
||||||
));
|
));
|
||||||
@@ -144,73 +142,6 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
|
|||||||
));
|
));
|
||||||
gulp.task(minifyVSCodeTask);
|
gulp.task(minifyVSCodeTask);
|
||||||
|
|
||||||
// Package
|
|
||||||
|
|
||||||
// @ts-ignore JSON checking: darwinCredits is optional
|
|
||||||
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
|
||||||
|
|
||||||
function darwinBundleDocumentType(extensions, icon) {
|
|
||||||
return {
|
|
||||||
name: product.nameLong + ' document',
|
|
||||||
role: 'Editor',
|
|
||||||
ostypes: ["TEXT", "utxt", "TUTX", "****"],
|
|
||||||
extensions: extensions,
|
|
||||||
iconFile: icon
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const config = {
|
|
||||||
version: getElectronVersion(),
|
|
||||||
productAppName: product.nameLong,
|
|
||||||
companyName: 'Microsoft Corporation',
|
|
||||||
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
|
|
||||||
darwinIcon: 'resources/darwin/code.icns',
|
|
||||||
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
|
||||||
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
|
||||||
darwinHelpBookFolder: 'VS Code HelpBook',
|
|
||||||
darwinHelpBookName: 'VS Code HelpBook',
|
|
||||||
darwinBundleDocumentTypes: [
|
|
||||||
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
|
|
||||||
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
|
|
||||||
],
|
|
||||||
darwinBundleURLTypes: [{
|
|
||||||
role: 'Viewer',
|
|
||||||
name: product.nameLong,
|
|
||||||
urlSchemes: [product.urlProtocol]
|
|
||||||
}],
|
|
||||||
darwinForceDarkModeSupport: true,
|
|
||||||
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
|
|
||||||
linuxExecutableName: product.applicationName,
|
|
||||||
winIcon: 'resources/win32/code.ico',
|
|
||||||
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
|
|
||||||
|
|
||||||
// @ts-ignore JSON checking: electronRepository is optional
|
|
||||||
repo: product.electronRepository || undefined
|
|
||||||
};
|
|
||||||
|
|
||||||
function getElectron(arch) {
|
|
||||||
return () => {
|
|
||||||
const electronOpts = _.extend({}, config, {
|
|
||||||
platform: process.platform,
|
|
||||||
arch,
|
|
||||||
ffmpegChromium: true,
|
|
||||||
keepDefaultApp: true
|
|
||||||
});
|
|
||||||
|
|
||||||
return gulp.src('package.json')
|
|
||||||
.pipe(json({ name: product.nameShort }))
|
|
||||||
.pipe(electron(electronOpts))
|
|
||||||
.pipe(filter(['**', '!**/app/package.json']))
|
|
||||||
.pipe(vfs.dest('.build/electron'));
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
|
|
||||||
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
|
|
||||||
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
|
|
||||||
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
|
|
||||||
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Compute checksums for some files.
|
* Compute checksums for some files.
|
||||||
*
|
*
|
||||||
@@ -265,10 +196,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
|
|||||||
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
|
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
|
||||||
.pipe(util.setExecutableBit(['**/*.sh']));
|
.pipe(util.setExecutableBit(['**/*.sh']));
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory
|
||||||
ext.packageBuiltInExtensions();
|
|
||||||
|
|
||||||
const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
|
|
||||||
|
|
||||||
const sources = es.merge(src, extensions)
|
const sources = es.merge(src, extensions)
|
||||||
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
|
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
|
||||||
@@ -468,7 +396,7 @@ gulp.task(task.define(
|
|||||||
optimizeVSCodeTask,
|
optimizeVSCodeTask,
|
||||||
function () {
|
function () {
|
||||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||||
const pathToExtensions = './extensions/*';
|
const pathToExtensions = '.build/extensions/*';
|
||||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||||
|
|
||||||
return es.merge(
|
return es.merge(
|
||||||
@@ -489,7 +417,7 @@ gulp.task(task.define(
|
|||||||
optimizeVSCodeTask,
|
optimizeVSCodeTask,
|
||||||
function () {
|
function () {
|
||||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||||
const pathToExtensions = './extensions/*';
|
const pathToExtensions = '.build/extensions/*';
|
||||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||||
|
|
||||||
return es.merge(
|
return es.merge(
|
||||||
|
|||||||
@@ -43,7 +43,8 @@ function prepareDebPackage(arch) {
|
|||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
|
||||||
|
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
|
||||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
||||||
|
|
||||||
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
||||||
@@ -136,6 +137,7 @@ function prepareRpmPackage(arch) {
|
|||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
|
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
|
||||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
.pipe(replace('@@ICON@@', product.linuxIconName))
|
||||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
||||||
|
|
||||||
@@ -206,21 +208,25 @@ function prepareSnapPackage(arch) {
|
|||||||
const destination = getSnapBuildPath(arch);
|
const destination = getSnapBuildPath(arch);
|
||||||
|
|
||||||
return function () {
|
return function () {
|
||||||
|
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
|
||||||
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||||
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
|
.pipe(rename(`snap/gui/${product.applicationName}.desktop`));
|
||||||
|
|
||||||
|
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
|
||||||
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
||||||
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
|
.pipe(rename(`snap/gui/${product.applicationName}-url-handler.desktop`));
|
||||||
|
|
||||||
const desktops = es.merge(desktop, desktopUrlHandler)
|
const desktops = es.merge(desktop, desktopUrlHandler)
|
||||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||||
.pipe(replace('@@NAME@@', product.applicationName))
|
.pipe(replace('@@NAME@@', product.applicationName))
|
||||||
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
|
.pipe(replace('@@EXEC@@', `${product.applicationName} --force-user-env`))
|
||||||
|
.pipe(replace('@@ICON@@', `\${SNAP}/meta/gui/${product.linuxIconName}.png`))
|
||||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
||||||
|
|
||||||
|
// An icon that is placed in snap/gui will be placed into meta/gui verbatim.
|
||||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||||
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
|
.pipe(rename(`snap/gui/${product.linuxIconName}.png`));
|
||||||
|
|
||||||
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||||
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
|
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
|
||||||
@@ -241,7 +247,8 @@ function prepareSnapPackage(arch) {
|
|||||||
|
|
||||||
function buildSnapPackage(arch) {
|
function buildSnapPackage(arch) {
|
||||||
const snapBuildPath = getSnapBuildPath(arch);
|
const snapBuildPath = getSnapBuildPath(arch);
|
||||||
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
|
// Default target for snapcraft runs: pull, build, stage and prime, and finally assembles the snap.
|
||||||
|
return shell.task(`cd ${snapBuildPath} && snapcraft`);
|
||||||
}
|
}
|
||||||
|
|
||||||
const BUILD_TARGETS = [
|
const BUILD_TARGETS = [
|
||||||
|
|||||||
@@ -6,150 +6,11 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
const gulp = require('gulp');
|
const gulp = require('gulp');
|
||||||
const path = require('path');
|
|
||||||
const es = require('event-stream');
|
|
||||||
const util = require('./lib/util');
|
|
||||||
const task = require('./lib/task');
|
|
||||||
const common = require('./lib/optimize');
|
|
||||||
const product = require('../product.json');
|
|
||||||
const rename = require('gulp-rename');
|
|
||||||
const filter = require('gulp-filter');
|
|
||||||
const json = require('gulp-json-editor');
|
|
||||||
const _ = require('underscore');
|
|
||||||
const deps = require('./dependencies');
|
|
||||||
const vfs = require('vinyl-fs');
|
|
||||||
const packageJson = require('../package.json');
|
|
||||||
const { compileBuildTask } = require('./gulpfile.compile');
|
|
||||||
|
|
||||||
const REPO_ROOT = path.dirname(__dirname);
|
const noop = () => { return Promise.resolve(); };
|
||||||
const commit = util.getVersion(REPO_ROOT);
|
|
||||||
const BUILD_ROOT = path.dirname(REPO_ROOT);
|
|
||||||
const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web');
|
|
||||||
|
|
||||||
const productionDependencies = deps.getProductionDependencies(WEB_FOLDER);
|
gulp.task('minify-vscode-web', noop);
|
||||||
|
gulp.task('vscode-web', noop);
|
||||||
const nodeModules = Object.keys(product.dependencies || {})
|
gulp.task('vscode-web-min', noop);
|
||||||
.concat(_.uniq(productionDependencies.map(d => d.name)));
|
gulp.task('vscode-web-ci', noop);
|
||||||
|
gulp.task('vscode-web-min-ci', noop);
|
||||||
const vscodeWebResources = [
|
|
||||||
|
|
||||||
// Workbench
|
|
||||||
'out-build/vs/{base,platform,editor,workbench}/**/*.{svg,png,html}',
|
|
||||||
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
|
|
||||||
'out-build/vs/**/markdown.css',
|
|
||||||
|
|
||||||
// Webview
|
|
||||||
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
|
|
||||||
|
|
||||||
// Extension Worker
|
|
||||||
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',
|
|
||||||
|
|
||||||
// Excludes
|
|
||||||
'!out-build/vs/**/{node,electron-browser,electron-main}/**',
|
|
||||||
'!out-build/vs/editor/standalone/**',
|
|
||||||
'!out-build/vs/workbench/**/*-tb.png',
|
|
||||||
'!**/test/**'
|
|
||||||
];
|
|
||||||
|
|
||||||
const buildfile = require('../src/buildfile');
|
|
||||||
|
|
||||||
const vscodeWebEntryPoints = [
|
|
||||||
buildfile.workbenchWeb,
|
|
||||||
buildfile.serviceWorker,
|
|
||||||
buildfile.workerExtensionHost,
|
|
||||||
buildfile.keyboardMaps,
|
|
||||||
buildfile.base
|
|
||||||
];
|
|
||||||
|
|
||||||
const optimizeVSCodeWebTask = task.define('optimize-vscode-web', task.series(
|
|
||||||
util.rimraf('out-vscode-web'),
|
|
||||||
common.optimizeTask({
|
|
||||||
src: 'out-build',
|
|
||||||
entryPoints: _.flatten(vscodeWebEntryPoints),
|
|
||||||
otherSources: [],
|
|
||||||
resources: vscodeWebResources,
|
|
||||||
loaderConfig: common.loaderConfig(nodeModules),
|
|
||||||
out: 'out-vscode-web',
|
|
||||||
bundleInfo: undefined
|
|
||||||
})
|
|
||||||
));
|
|
||||||
|
|
||||||
const minifyVSCodeWebTask = task.define('minify-vscode-web', task.series(
|
|
||||||
optimizeVSCodeWebTask,
|
|
||||||
util.rimraf('out-vscode-web-min'),
|
|
||||||
common.minifyTask('out-vscode-web', `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`)
|
|
||||||
));
|
|
||||||
gulp.task(minifyVSCodeWebTask);
|
|
||||||
|
|
||||||
function packageTask(sourceFolderName, destinationFolderName) {
|
|
||||||
const destination = path.join(BUILD_ROOT, destinationFolderName);
|
|
||||||
|
|
||||||
return () => {
|
|
||||||
const src = gulp.src(sourceFolderName + '/**', { base: '.' })
|
|
||||||
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + sourceFolderName), 'out'); }))
|
|
||||||
.pipe(filter(['**', '!**/*.js.map']));
|
|
||||||
|
|
||||||
const sources = es.merge(src);
|
|
||||||
|
|
||||||
let version = packageJson.version;
|
|
||||||
const quality = product.quality;
|
|
||||||
|
|
||||||
if (quality && quality !== 'stable') {
|
|
||||||
version += '-' + quality;
|
|
||||||
}
|
|
||||||
|
|
||||||
const name = product.nameShort;
|
|
||||||
const packageJsonStream = gulp.src(['remote/web/package.json'], { base: 'remote/web' })
|
|
||||||
.pipe(json({ name, version }));
|
|
||||||
|
|
||||||
const date = new Date().toISOString();
|
|
||||||
|
|
||||||
const productJsonStream = gulp.src(['product.json'], { base: '.' })
|
|
||||||
.pipe(json({ commit, date }));
|
|
||||||
|
|
||||||
const license = gulp.src(['remote/LICENSE'], { base: 'remote' });
|
|
||||||
|
|
||||||
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(REPO_ROOT, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`, `!${d}/.bin/**`]));
|
|
||||||
|
|
||||||
const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true })
|
|
||||||
.pipe(filter(['**', '!**/package-lock.json']))
|
|
||||||
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')));
|
|
||||||
|
|
||||||
const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' });
|
|
||||||
|
|
||||||
let all = es.merge(
|
|
||||||
packageJsonStream,
|
|
||||||
productJsonStream,
|
|
||||||
license,
|
|
||||||
sources,
|
|
||||||
deps,
|
|
||||||
favicon
|
|
||||||
);
|
|
||||||
|
|
||||||
let result = all
|
|
||||||
.pipe(util.skipDirectories())
|
|
||||||
.pipe(util.fixWin32DirectoryPermissions());
|
|
||||||
|
|
||||||
return result.pipe(vfs.dest(destination));
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const dashed = (str) => (str ? `-${str}` : ``);
|
|
||||||
|
|
||||||
['', 'min'].forEach(minified => {
|
|
||||||
const sourceFolderName = `out-vscode-web${dashed(minified)}`;
|
|
||||||
const destinationFolderName = `vscode-web`;
|
|
||||||
|
|
||||||
const vscodeWebTaskCI = task.define(`vscode-web${dashed(minified)}-ci`, task.series(
|
|
||||||
minified ? minifyVSCodeWebTask : optimizeVSCodeWebTask,
|
|
||||||
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
|
|
||||||
packageTask(sourceFolderName, destinationFolderName)
|
|
||||||
));
|
|
||||||
gulp.task(vscodeWebTaskCI);
|
|
||||||
|
|
||||||
const vscodeWebTask = task.define(`vscode-web${dashed(minified)}`, task.series(
|
|
||||||
compileBuildTask,
|
|
||||||
vscodeWebTaskCI
|
|
||||||
));
|
|
||||||
gulp.task(vscodeWebTask);
|
|
||||||
});
|
|
||||||
|
|||||||
@@ -23,7 +23,7 @@ const repoPath = path.dirname(__dirname);
|
|||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
|
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
|
||||||
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
|
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
|
||||||
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
|
const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.zip`);
|
||||||
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
|
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
|
||||||
const issPath = path.join(__dirname, 'win32', 'code.iss');
|
const issPath = path.join(__dirname, 'win32', 'code.iss');
|
||||||
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
|
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');
|
||||||
|
|||||||
@@ -19,9 +19,17 @@ const ansiColors = require('ansi-colors');
|
|||||||
|
|
||||||
const root = path.dirname(path.dirname(__dirname));
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const builtInExtensions = require('../builtInExtensions-insiders.json');
|
const quality = process.env['VSCODE_QUALITY'];
|
||||||
|
const builtInExtensions = quality && quality === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
|
||||||
// {{SQL CARBON EDIT}} - END
|
// {{SQL CARBON EDIT}} - END
|
||||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||||
|
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];
|
||||||
|
|
||||||
|
function log() {
|
||||||
|
if (ENABLE_LOGGING) {
|
||||||
|
fancyLog.apply(this, arguments);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
function getExtensionPath(extension) {
|
function getExtensionPath(extension) {
|
||||||
return path.join(root, '.build', 'builtInExtensions', extension.name);
|
return path.join(root, '.build', 'builtInExtensions', extension.name);
|
||||||
@@ -46,7 +54,7 @@ function isUpToDate(extension) {
|
|||||||
|
|
||||||
function syncMarketplaceExtension(extension) {
|
function syncMarketplaceExtension(extension) {
|
||||||
if (isUpToDate(extension)) {
|
if (isUpToDate(extension)) {
|
||||||
fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
|
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -55,13 +63,13 @@ function syncMarketplaceExtension(extension) {
|
|||||||
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
|
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||||
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
|
||||||
.pipe(vfs.dest('.build/builtInExtensions'))
|
.pipe(vfs.dest('.build/builtInExtensions'))
|
||||||
.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
|
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
|
||||||
}
|
}
|
||||||
|
|
||||||
function syncExtension(extension, controlState) {
|
function syncExtension(extension, controlState) {
|
||||||
switch (controlState) {
|
switch (controlState) {
|
||||||
case 'disabled':
|
case 'disabled':
|
||||||
fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
|
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
|
|
||||||
case 'marketplace':
|
case 'marketplace':
|
||||||
@@ -69,15 +77,15 @@ function syncExtension(extension, controlState) {
|
|||||||
|
|
||||||
default:
|
default:
|
||||||
if (!fs.existsSync(controlState)) {
|
if (!fs.existsSync(controlState)) {
|
||||||
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
|
|
||||||
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
|
||||||
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
}
|
}
|
||||||
|
|
||||||
fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
|
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
|
||||||
return es.readArray([]);
|
return es.readArray([]);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -96,8 +104,8 @@ function writeControlFile(control) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function main() {
|
function main() {
|
||||||
fancyLog('Syncronizing built-in extensions...');
|
log('Syncronizing built-in extensions...');
|
||||||
fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
|
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
|
||||||
|
|
||||||
const control = readControlFile();
|
const control = readControlFile();
|
||||||
const streams = [];
|
const streams = [];
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ const bom = require("gulp-bom");
|
|||||||
const sourcemaps = require("gulp-sourcemaps");
|
const sourcemaps = require("gulp-sourcemaps");
|
||||||
const tsb = require("gulp-tsb");
|
const tsb = require("gulp-tsb");
|
||||||
const path = require("path");
|
const path = require("path");
|
||||||
const _ = require("underscore");
|
|
||||||
const monacodts = require("../monaco/api");
|
const monacodts = require("../monaco/api");
|
||||||
const nls = require("./nls");
|
const nls = require("./nls");
|
||||||
const reporter_1 = require("./reporter");
|
const reporter_1 = require("./reporter");
|
||||||
@@ -22,14 +21,7 @@ const watch = require('./watch');
|
|||||||
const reporter = reporter_1.createReporter();
|
const reporter = reporter_1.createReporter();
|
||||||
function getTypeScriptCompilerOptions(src) {
|
function getTypeScriptCompilerOptions(src) {
|
||||||
const rootDir = path.join(__dirname, `../../${src}`);
|
const rootDir = path.join(__dirname, `../../${src}`);
|
||||||
const tsconfig = require(`../../${src}/tsconfig.json`);
|
let options = {};
|
||||||
let options;
|
|
||||||
if (tsconfig.extends) {
|
|
||||||
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
options = tsconfig.compilerOptions;
|
|
||||||
}
|
|
||||||
options.verbose = false;
|
options.verbose = false;
|
||||||
options.sourceMap = true;
|
options.sourceMap = true;
|
||||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||||
@@ -38,49 +30,46 @@ function getTypeScriptCompilerOptions(src) {
|
|||||||
options.rootDir = rootDir;
|
options.rootDir = rootDir;
|
||||||
options.baseUrl = rootDir;
|
options.baseUrl = rootDir;
|
||||||
options.sourceRoot = util.toFileUri(rootDir);
|
options.sourceRoot = util.toFileUri(rootDir);
|
||||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
|
||||||
return options;
|
return options;
|
||||||
}
|
}
|
||||||
function createCompile(src, build, emitError) {
|
function createCompile(src, build, emitError) {
|
||||||
const opts = _.clone(getTypeScriptCompilerOptions(src));
|
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
|
||||||
opts.inlineSources = !!build;
|
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
|
||||||
opts.noFilesystemLookup = true;
|
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
|
||||||
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
|
function pipeline(token) {
|
||||||
return function (token) {
|
|
||||||
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
||||||
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
||||||
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
|
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
|
||||||
const input = es.through();
|
const input = es.through();
|
||||||
const output = input
|
const output = input
|
||||||
.pipe(utf8Filter)
|
.pipe(utf8Filter)
|
||||||
.pipe(bom())
|
.pipe(bom()) // this is required to preserve BOM in test files that loose it otherwise
|
||||||
.pipe(utf8Filter.restore)
|
.pipe(utf8Filter.restore)
|
||||||
.pipe(tsFilter)
|
.pipe(tsFilter)
|
||||||
.pipe(util.loadSourcemaps())
|
.pipe(util.loadSourcemaps())
|
||||||
.pipe(ts(token))
|
.pipe(compilation(token))
|
||||||
.pipe(noDeclarationsFilter)
|
.pipe(noDeclarationsFilter)
|
||||||
.pipe(build ? nls() : es.through())
|
.pipe(build ? nls() : es.through())
|
||||||
.pipe(noDeclarationsFilter.restore)
|
.pipe(noDeclarationsFilter.restore)
|
||||||
.pipe(sourcemaps.write('.', {
|
.pipe(sourcemaps.write('.', {
|
||||||
addComment: false,
|
addComment: false,
|
||||||
includeContent: !!build,
|
includeContent: !!build,
|
||||||
sourceRoot: opts.sourceRoot
|
sourceRoot: overrideOptions.sourceRoot
|
||||||
}))
|
}))
|
||||||
.pipe(tsFilter.restore)
|
.pipe(tsFilter.restore)
|
||||||
.pipe(reporter.end(!!emitError));
|
.pipe(reporter.end(!!emitError));
|
||||||
return es.duplex(input, output);
|
return es.duplex(input, output);
|
||||||
|
}
|
||||||
|
pipeline.tsProjectSrc = () => {
|
||||||
|
return compilation.src({ base: src });
|
||||||
};
|
};
|
||||||
|
return pipeline;
|
||||||
}
|
}
|
||||||
const typesDts = [
|
|
||||||
'node_modules/typescript/lib/*.d.ts',
|
|
||||||
'node_modules/@types/**/*.d.ts',
|
|
||||||
'!node_modules/@types/webpack/**/*',
|
|
||||||
'!node_modules/@types/uglify-js/**/*',
|
|
||||||
];
|
|
||||||
function compileTask(src, out, build) {
|
function compileTask(src, out, build) {
|
||||||
return function () {
|
return function () {
|
||||||
const compile = createCompile(src, build, true);
|
const compile = createCompile(src, build, true);
|
||||||
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
|
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
|
||||||
let generator = new MonacoGenerator(false);
|
let generator = new MonacoGenerator(false);
|
||||||
if (src === 'src') {
|
if (src === 'src') {
|
||||||
generator.execute();
|
generator.execute();
|
||||||
@@ -95,8 +84,8 @@ exports.compileTask = compileTask;
|
|||||||
function watchTask(out, build) {
|
function watchTask(out, build) {
|
||||||
return function () {
|
return function () {
|
||||||
const compile = createCompile('src', build);
|
const compile = createCompile('src', build);
|
||||||
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
|
const src = gulp.src('src/**', { base: 'src' });
|
||||||
const watchSrc = watch('src/**', { base: 'src' });
|
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
|
||||||
let generator = new MonacoGenerator(true);
|
let generator = new MonacoGenerator(true);
|
||||||
generator.execute();
|
generator.execute();
|
||||||
return watchSrc
|
return watchSrc
|
||||||
|
|||||||
@@ -12,27 +12,21 @@ import * as bom from 'gulp-bom';
|
|||||||
import * as sourcemaps from 'gulp-sourcemaps';
|
import * as sourcemaps from 'gulp-sourcemaps';
|
||||||
import * as tsb from 'gulp-tsb';
|
import * as tsb from 'gulp-tsb';
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as _ from 'underscore';
|
|
||||||
import * as monacodts from '../monaco/api';
|
import * as monacodts from '../monaco/api';
|
||||||
import * as nls from './nls';
|
import * as nls from './nls';
|
||||||
import { createReporter } from './reporter';
|
import { createReporter } from './reporter';
|
||||||
import * as util from './util';
|
import * as util from './util';
|
||||||
import * as fancyLog from 'fancy-log';
|
import * as fancyLog from 'fancy-log';
|
||||||
import * as ansiColors from 'ansi-colors';
|
import * as ansiColors from 'ansi-colors';
|
||||||
|
import ts = require('typescript');
|
||||||
|
|
||||||
const watch = require('./watch');
|
const watch = require('./watch');
|
||||||
|
|
||||||
const reporter = createReporter();
|
const reporter = createReporter();
|
||||||
|
|
||||||
function getTypeScriptCompilerOptions(src: string) {
|
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
|
||||||
const rootDir = path.join(__dirname, `../../${src}`);
|
const rootDir = path.join(__dirname, `../../${src}`);
|
||||||
const tsconfig = require(`../../${src}/tsconfig.json`);
|
let options: ts.CompilerOptions = {};
|
||||||
let options: { [key: string]: any };
|
|
||||||
if (tsconfig.extends) {
|
|
||||||
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
|
|
||||||
} else {
|
|
||||||
options = tsconfig.compilerOptions;
|
|
||||||
}
|
|
||||||
options.verbose = false;
|
options.verbose = false;
|
||||||
options.sourceMap = true;
|
options.sourceMap = true;
|
||||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||||
@@ -41,18 +35,17 @@ function getTypeScriptCompilerOptions(src: string) {
|
|||||||
options.rootDir = rootDir;
|
options.rootDir = rootDir;
|
||||||
options.baseUrl = rootDir;
|
options.baseUrl = rootDir;
|
||||||
options.sourceRoot = util.toFileUri(rootDir);
|
options.sourceRoot = util.toFileUri(rootDir);
|
||||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
|
||||||
return options;
|
return options;
|
||||||
}
|
}
|
||||||
|
|
||||||
function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
|
function createCompile(src: string, build: boolean, emitError?: boolean) {
|
||||||
const opts = _.clone(getTypeScriptCompilerOptions(src));
|
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
|
||||||
opts.inlineSources = !!build;
|
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };
|
||||||
opts.noFilesystemLookup = true;
|
|
||||||
|
|
||||||
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
|
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
|
||||||
|
|
||||||
return function (token?: util.ICancellationToken) {
|
function pipeline(token?: util.ICancellationToken) {
|
||||||
|
|
||||||
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
||||||
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
||||||
@@ -61,43 +54,35 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
|
|||||||
const input = es.through();
|
const input = es.through();
|
||||||
const output = input
|
const output = input
|
||||||
.pipe(utf8Filter)
|
.pipe(utf8Filter)
|
||||||
.pipe(bom())
|
.pipe(bom()) // this is required to preserve BOM in test files that loose it otherwise
|
||||||
.pipe(utf8Filter.restore)
|
.pipe(utf8Filter.restore)
|
||||||
.pipe(tsFilter)
|
.pipe(tsFilter)
|
||||||
.pipe(util.loadSourcemaps())
|
.pipe(util.loadSourcemaps())
|
||||||
.pipe(ts(token))
|
.pipe(compilation(token))
|
||||||
.pipe(noDeclarationsFilter)
|
.pipe(noDeclarationsFilter)
|
||||||
.pipe(build ? nls() : es.through())
|
.pipe(build ? nls() : es.through())
|
||||||
.pipe(noDeclarationsFilter.restore)
|
.pipe(noDeclarationsFilter.restore)
|
||||||
.pipe(sourcemaps.write('.', {
|
.pipe(sourcemaps.write('.', {
|
||||||
addComment: false,
|
addComment: false,
|
||||||
includeContent: !!build,
|
includeContent: !!build,
|
||||||
sourceRoot: opts.sourceRoot
|
sourceRoot: overrideOptions.sourceRoot
|
||||||
}))
|
}))
|
||||||
.pipe(tsFilter.restore)
|
.pipe(tsFilter.restore)
|
||||||
.pipe(reporter.end(!!emitError));
|
.pipe(reporter.end(!!emitError));
|
||||||
|
|
||||||
return es.duplex(input, output);
|
return es.duplex(input, output);
|
||||||
|
}
|
||||||
|
pipeline.tsProjectSrc = () => {
|
||||||
|
return compilation.src({ base: src });
|
||||||
};
|
};
|
||||||
|
return pipeline;
|
||||||
}
|
}
|
||||||
|
|
||||||
const typesDts = [
|
|
||||||
'node_modules/typescript/lib/*.d.ts',
|
|
||||||
'node_modules/@types/**/*.d.ts',
|
|
||||||
'!node_modules/@types/webpack/**/*',
|
|
||||||
'!node_modules/@types/uglify-js/**/*',
|
|
||||||
];
|
|
||||||
|
|
||||||
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
||||||
|
|
||||||
return function () {
|
return function () {
|
||||||
const compile = createCompile(src, build, true);
|
const compile = createCompile(src, build, true);
|
||||||
|
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
|
||||||
const srcPipe = es.merge(
|
|
||||||
gulp.src(`${src}/**`, { base: `${src}` }),
|
|
||||||
gulp.src(typesDts),
|
|
||||||
);
|
|
||||||
|
|
||||||
let generator = new MonacoGenerator(false);
|
let generator = new MonacoGenerator(false);
|
||||||
if (src === 'src') {
|
if (src === 'src') {
|
||||||
generator.execute();
|
generator.execute();
|
||||||
@@ -115,11 +100,8 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
|
|||||||
return function () {
|
return function () {
|
||||||
const compile = createCompile('src', build);
|
const compile = createCompile('src', build);
|
||||||
|
|
||||||
const src = es.merge(
|
const src = gulp.src('src/**', { base: 'src' });
|
||||||
gulp.src('src/**', { base: 'src' }),
|
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
|
||||||
gulp.src(typesDts),
|
|
||||||
);
|
|
||||||
const watchSrc = watch('src/**', { base: 'src' });
|
|
||||||
|
|
||||||
let generator = new MonacoGenerator(true);
|
let generator = new MonacoGenerator(true);
|
||||||
generator.execute();
|
generator.execute();
|
||||||
|
|||||||
@@ -2,29 +2,88 @@
|
|||||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
*--------------------------------------------------------------------------------------------*/
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
'use strict';
|
'use strict';
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const fs = require('fs');
|
const fs = require("fs");
|
||||||
const path = require('path');
|
const path = require("path");
|
||||||
|
const vfs = require("vinyl-fs");
|
||||||
|
const filter = require("gulp-filter");
|
||||||
|
const json = require("gulp-json-editor");
|
||||||
|
const _ = require("underscore");
|
||||||
|
const util = require("./util");
|
||||||
|
const electron = require('gulp-atom-electron');
|
||||||
const root = path.dirname(path.dirname(__dirname));
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
|
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
|
||||||
|
const commit = util.getVersion(root);
|
||||||
function getElectronVersion() {
|
function getElectronVersion() {
|
||||||
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
|
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
|
||||||
// @ts-ignore
|
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
|
||||||
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
|
return target;
|
||||||
|
}
|
||||||
return target;
|
exports.getElectronVersion = getElectronVersion;
|
||||||
|
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
||||||
|
function darwinBundleDocumentType(extensions, icon) {
|
||||||
|
return {
|
||||||
|
name: product.nameLong + ' document',
|
||||||
|
role: 'Editor',
|
||||||
|
ostypes: ["TEXT", "utxt", "TUTX", "****"],
|
||||||
|
extensions: extensions,
|
||||||
|
iconFile: icon
|
||||||
|
};
|
||||||
|
}
|
||||||
|
exports.config = {
|
||||||
|
version: getElectronVersion(),
|
||||||
|
productAppName: product.nameLong,
|
||||||
|
companyName: 'Microsoft Corporation',
|
||||||
|
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
|
||||||
|
darwinIcon: 'resources/darwin/code.icns',
|
||||||
|
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
||||||
|
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
||||||
|
darwinHelpBookFolder: 'VS Code HelpBook',
|
||||||
|
darwinHelpBookName: 'VS Code HelpBook',
|
||||||
|
darwinBundleDocumentTypes: [
|
||||||
|
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
|
||||||
|
],
|
||||||
|
darwinBundleURLTypes: [{
|
||||||
|
role: 'Viewer',
|
||||||
|
name: product.nameLong,
|
||||||
|
urlSchemes: [product.urlProtocol]
|
||||||
|
}],
|
||||||
|
darwinForceDarkModeSupport: true,
|
||||||
|
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
|
||||||
|
linuxExecutableName: product.applicationName,
|
||||||
|
winIcon: 'resources/win32/code.ico',
|
||||||
|
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
|
||||||
|
repo: product.electronRepository || undefined
|
||||||
|
};
|
||||||
|
function getElectron(arch) {
|
||||||
|
return () => {
|
||||||
|
const electronOpts = _.extend({}, exports.config, {
|
||||||
|
platform: process.platform,
|
||||||
|
arch,
|
||||||
|
ffmpegChromium: true,
|
||||||
|
keepDefaultApp: true
|
||||||
|
});
|
||||||
|
return vfs.src('package.json')
|
||||||
|
.pipe(json({ name: product.nameShort }))
|
||||||
|
.pipe(electron(electronOpts))
|
||||||
|
.pipe(filter(['**', '!**/app/package.json']))
|
||||||
|
.pipe(vfs.dest('.build/electron'));
|
||||||
|
};
|
||||||
|
}
|
||||||
|
async function main(arch = process.arch) {
|
||||||
|
const version = getElectronVersion();
|
||||||
|
const electronPath = path.join(root, '.build', 'electron');
|
||||||
|
const versionFile = path.join(electronPath, 'version');
|
||||||
|
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
|
||||||
|
if (!isUpToDate) {
|
||||||
|
await util.rimraf(electronPath)();
|
||||||
|
await util.streamToPromise(getElectron(arch)());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
module.exports.getElectronVersion = getElectronVersion;
|
|
||||||
|
|
||||||
// returns 0 if the right version of electron is in .build/electron
|
|
||||||
// @ts-ignore
|
|
||||||
if (require.main === module) {
|
if (require.main === module) {
|
||||||
const version = getElectronVersion();
|
main(process.argv[2]).catch(err => {
|
||||||
const versionFile = path.join(root, '.build', 'electron', 'version');
|
console.error(err);
|
||||||
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
|
process.exit(1);
|
||||||
|
});
|
||||||
process.exit(isUpToDate ? 0 : 1);
|
|
||||||
}
|
}
|
||||||
|
|||||||
100
build/lib/electron.ts
Normal file
100
build/lib/electron.ts
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
'use strict';
|
||||||
|
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as vfs from 'vinyl-fs';
|
||||||
|
import * as filter from 'gulp-filter';
|
||||||
|
import * as json from 'gulp-json-editor';
|
||||||
|
import * as _ from 'underscore';
|
||||||
|
import * as util from './util';
|
||||||
|
|
||||||
|
const electron = require('gulp-atom-electron');
|
||||||
|
|
||||||
|
const root = path.dirname(path.dirname(__dirname));
|
||||||
|
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
|
||||||
|
const commit = util.getVersion(root);
|
||||||
|
|
||||||
|
export function getElectronVersion(): string {
|
||||||
|
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
|
||||||
|
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
|
||||||
|
return target;
|
||||||
|
}
|
||||||
|
|
||||||
|
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
|
||||||
|
|
||||||
|
function darwinBundleDocumentType(extensions: string[], icon: string) {
|
||||||
|
return {
|
||||||
|
name: product.nameLong + ' document',
|
||||||
|
role: 'Editor',
|
||||||
|
ostypes: ["TEXT", "utxt", "TUTX", "****"],
|
||||||
|
extensions: extensions,
|
||||||
|
iconFile: icon
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export const config = {
|
||||||
|
version: getElectronVersion(),
|
||||||
|
productAppName: product.nameLong,
|
||||||
|
companyName: 'Microsoft Corporation',
|
||||||
|
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
|
||||||
|
darwinIcon: 'resources/darwin/code.icns',
|
||||||
|
darwinBundleIdentifier: product.darwinBundleIdentifier,
|
||||||
|
darwinApplicationCategoryType: 'public.app-category.developer-tools',
|
||||||
|
darwinHelpBookFolder: 'VS Code HelpBook',
|
||||||
|
darwinHelpBookName: 'VS Code HelpBook',
|
||||||
|
darwinBundleDocumentTypes: [
|
||||||
|
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
|
||||||
|
],
|
||||||
|
darwinBundleURLTypes: [{
|
||||||
|
role: 'Viewer',
|
||||||
|
name: product.nameLong,
|
||||||
|
urlSchemes: [product.urlProtocol]
|
||||||
|
}],
|
||||||
|
darwinForceDarkModeSupport: true,
|
||||||
|
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
|
||||||
|
linuxExecutableName: product.applicationName,
|
||||||
|
winIcon: 'resources/win32/code.ico',
|
||||||
|
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
|
||||||
|
repo: product.electronRepository || undefined
|
||||||
|
};
|
||||||
|
|
||||||
|
function getElectron(arch: string): () => NodeJS.ReadWriteStream {
|
||||||
|
return () => {
|
||||||
|
const electronOpts = _.extend({}, config, {
|
||||||
|
platform: process.platform,
|
||||||
|
arch,
|
||||||
|
ffmpegChromium: true,
|
||||||
|
keepDefaultApp: true
|
||||||
|
});
|
||||||
|
|
||||||
|
return vfs.src('package.json')
|
||||||
|
.pipe(json({ name: product.nameShort }))
|
||||||
|
.pipe(electron(electronOpts))
|
||||||
|
.pipe(filter(['**', '!**/app/package.json']))
|
||||||
|
.pipe(vfs.dest('.build/electron'));
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main(arch = process.arch): Promise<void> {
|
||||||
|
const version = getElectronVersion();
|
||||||
|
const electronPath = path.join(root, '.build', 'electron');
|
||||||
|
const versionFile = path.join(electronPath, 'version');
|
||||||
|
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
|
||||||
|
|
||||||
|
if (!isUpToDate) {
|
||||||
|
await util.rimraf(electronPath)();
|
||||||
|
await util.streamToPromise(getElectron(arch)());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (require.main === module) {
|
||||||
|
main(process.argv[2]).catch(err => {
|
||||||
|
console.error(err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
|
}
|
||||||
@@ -101,7 +101,7 @@ function fromLocalWebpack(extensionPath) {
|
|||||||
result.emit('error', compilation.warnings.join('\n'));
|
result.emit('error', compilation.warnings.join('\n'));
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
|
const webpackConfig = Object.assign(Object.assign({}, require(webpackConfigPath)), { mode: 'production' });
|
||||||
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
||||||
return webpackGulp(webpackConfig, webpack, webpackDone)
|
return webpackGulp(webpackConfig, webpack, webpackDone)
|
||||||
.pipe(es.through(function (data) {
|
.pipe(es.through(function (data) {
|
||||||
@@ -189,9 +189,11 @@ const excludedExtensions = [
|
|||||||
'integration-tests'
|
'integration-tests'
|
||||||
];
|
];
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const sqlBuiltInExtensions = [
|
const externalExtensions = [
|
||||||
// Add SQL built-in extensions here.
|
// This is the list of SQL extensions which the source code is included in this repository, but
|
||||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
// they get packaged separately. Adding extension name here, will make the build to create
|
||||||
|
// a separate vsix package for the extension and the extension will be excluded from the main package.
|
||||||
|
// Any extension not included here will be installed by default.
|
||||||
'admin-tool-ext-win',
|
'admin-tool-ext-win',
|
||||||
'agent',
|
'agent',
|
||||||
'import',
|
'import',
|
||||||
@@ -199,13 +201,16 @@ const sqlBuiltInExtensions = [
|
|||||||
'admin-pack',
|
'admin-pack',
|
||||||
'dacpac',
|
'dacpac',
|
||||||
'schema-compare',
|
'schema-compare',
|
||||||
'cms'
|
'cms',
|
||||||
|
'query-history',
|
||||||
|
'liveshare',
|
||||||
|
'sql-database-projects'
|
||||||
|
];
|
||||||
|
// extensions that require a rebuild since they have native parts
|
||||||
|
const rebuildExtensions = [
|
||||||
|
'big-data-cluster',
|
||||||
|
'mssql'
|
||||||
];
|
];
|
||||||
// make resource deployment and BDC extension only available in insiders
|
|
||||||
if (process.env['VSCODE_QUALITY'] === 'stable') {
|
|
||||||
sqlBuiltInExtensions.push('resource-deployment');
|
|
||||||
sqlBuiltInExtensions.push('big-data-cluster');
|
|
||||||
}
|
|
||||||
const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
|
const builtInExtensions = process.env['VSCODE_QUALITY'] === 'stable' ? require('../builtInExtensions.json') : require('../builtInExtensions-insiders.json');
|
||||||
// {{SQL CARBON EDIT}} - End
|
// {{SQL CARBON EDIT}} - End
|
||||||
function packageLocalExtensionsStream() {
|
function packageLocalExtensionsStream() {
|
||||||
@@ -217,7 +222,7 @@ function packageLocalExtensionsStream() {
|
|||||||
})
|
})
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add aditional filter
|
.filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} Remove external Extensions with separate package
|
||||||
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
|
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
|
||||||
const localExtensions = localExtensionDescriptions.map(extension => {
|
const localExtensions = localExtensionDescriptions.map(extension => {
|
||||||
return fromLocal(extension.path)
|
return fromLocal(extension.path)
|
||||||
@@ -236,66 +241,40 @@ function packageMarketplaceExtensionsStream() {
|
|||||||
.pipe(util2.setExecutableBit(['**/*.sh']));
|
.pipe(util2.setExecutableBit(['**/*.sh']));
|
||||||
}
|
}
|
||||||
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
|
exports.packageMarketplaceExtensionsStream = packageMarketplaceExtensionsStream;
|
||||||
const vfs = require("vinyl-fs");
|
function packageExternalExtensionsStream() {
|
||||||
function packageBuiltInExtensions() {
|
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
.map(manifestPath => {
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
const extensionName = path.basename(extensionPath);
|
const extensionName = path.basename(extensionPath);
|
||||||
return { name: extensionName, path: extensionPath };
|
return { name: extensionName, path: extensionPath };
|
||||||
})
|
})
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
return fromLocal(extension.path)
|
||||||
const visxDirectory = path.join(path.dirname(root), 'vsix');
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
try {
|
|
||||||
if (!fs.existsSync(visxDirectory)) {
|
|
||||||
fs.mkdirSync(visxDirectory);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (err) {
|
|
||||||
// don't fail the build if the output directory already exists
|
|
||||||
console.warn(err);
|
|
||||||
}
|
|
||||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
|
||||||
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
|
|
||||||
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
|
||||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
|
||||||
vsce.createVSIX({
|
|
||||||
cwd: element.path,
|
|
||||||
packagePath: packagePath,
|
|
||||||
useYarn: true
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
return es.merge(builtExtensions);
|
||||||
}
|
}
|
||||||
exports.packageBuiltInExtensions = packageBuiltInExtensions;
|
exports.packageExternalExtensionsStream = packageExternalExtensionsStream;
|
||||||
function packageExtensionTask(extensionName, platform, arch) {
|
|
||||||
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
|
||||||
if (platform === 'darwin') {
|
|
||||||
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
|
|
||||||
}
|
|
||||||
platform = platform || process.platform;
|
|
||||||
return () => {
|
|
||||||
const root = path.resolve(path.join(__dirname, '../..'));
|
|
||||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => extensionName === name);
|
|
||||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
|
||||||
return fromLocal(extension.path);
|
|
||||||
}));
|
|
||||||
let result = localExtensions
|
|
||||||
.pipe(util2.skipDirectories())
|
|
||||||
.pipe(util2.fixWin32DirectoryPermissions())
|
|
||||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
|
||||||
return result.pipe(vfs.dest(destination));
|
|
||||||
};
|
|
||||||
}
|
|
||||||
exports.packageExtensionTask = packageExtensionTask;
|
|
||||||
// {{SQL CARBON EDIT}} - End
|
// {{SQL CARBON EDIT}} - End
|
||||||
|
function cleanRebuildExtensions(root) {
|
||||||
|
return Promise.all(rebuildExtensions.map(async (e) => {
|
||||||
|
await util2.rimraf(path.join(root, e))();
|
||||||
|
})).then();
|
||||||
|
}
|
||||||
|
exports.cleanRebuildExtensions = cleanRebuildExtensions;
|
||||||
|
function packageRebuildExtensionsStream() {
|
||||||
|
const extenalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||||
|
.map(manifestPath => {
|
||||||
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
|
const extensionName = path.basename(extensionPath);
|
||||||
|
return { name: extensionName, path: extensionPath };
|
||||||
|
})
|
||||||
|
.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
|
||||||
|
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
||||||
|
return fromLocal(extension.path)
|
||||||
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
|
});
|
||||||
|
return es.merge(builtExtensions);
|
||||||
|
}
|
||||||
|
exports.packageRebuildExtensionsStream = packageRebuildExtensionsStream;
|
||||||
|
|||||||
@@ -225,9 +225,11 @@ const excludedExtensions = [
|
|||||||
];
|
];
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
// {{SQL CARBON EDIT}}
|
||||||
const sqlBuiltInExtensions = [
|
const externalExtensions = [
|
||||||
// Add SQL built-in extensions here.
|
// This is the list of SQL extensions which the source code is included in this repository, but
|
||||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
// they get packaged separately. Adding extension name here, will make the build to create
|
||||||
|
// a separate vsix package for the extension and the extension will be excluded from the main package.
|
||||||
|
// Any extension not included here will be installed by default.
|
||||||
'admin-tool-ext-win',
|
'admin-tool-ext-win',
|
||||||
'agent',
|
'agent',
|
||||||
'import',
|
'import',
|
||||||
@@ -235,15 +237,17 @@ const sqlBuiltInExtensions = [
|
|||||||
'admin-pack',
|
'admin-pack',
|
||||||
'dacpac',
|
'dacpac',
|
||||||
'schema-compare',
|
'schema-compare',
|
||||||
'cms'
|
'cms',
|
||||||
|
'query-history',
|
||||||
|
'liveshare',
|
||||||
|
'sql-database-projects'
|
||||||
];
|
];
|
||||||
|
|
||||||
// make resource deployment and BDC extension only available in insiders
|
// extensions that require a rebuild since they have native parts
|
||||||
if (process.env['VSCODE_QUALITY'] === 'stable') {
|
const rebuildExtensions = [
|
||||||
sqlBuiltInExtensions.push('resource-deployment');
|
'big-data-cluster',
|
||||||
sqlBuiltInExtensions.push('big-data-cluster');
|
'mssql'
|
||||||
}
|
];
|
||||||
|
|
||||||
|
|
||||||
interface IBuiltInExtension {
|
interface IBuiltInExtension {
|
||||||
name: string;
|
name: string;
|
||||||
@@ -266,7 +270,7 @@ export function packageLocalExtensionsStream(): NodeJS.ReadWriteStream {
|
|||||||
})
|
})
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} add aditional filter
|
.filter(({ name }) => externalExtensions.indexOf(name) === -1); // {{SQL CARBON EDIT}} Remove external Extensions with separate package
|
||||||
|
|
||||||
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
|
const nodeModules = gulp.src('extensions/node_modules/**', { base: '.' });
|
||||||
const localExtensions = localExtensionDescriptions.map(extension => {
|
const localExtensions = localExtensionDescriptions.map(extension => {
|
||||||
@@ -288,71 +292,43 @@ export function packageMarketplaceExtensionsStream(): NodeJS.ReadWriteStream {
|
|||||||
.pipe(util2.setExecutableBit(['**/*.sh']));
|
.pipe(util2.setExecutableBit(['**/*.sh']));
|
||||||
}
|
}
|
||||||
|
|
||||||
// {{SQL CARBON EDIT}}
|
export function packageExternalExtensionsStream(): NodeJS.ReadWriteStream {
|
||||||
import * as _ from 'underscore';
|
const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
|
||||||
import * as vfs from 'vinyl-fs';
|
|
||||||
|
|
||||||
export function packageBuiltInExtensions() {
|
|
||||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
.map(manifestPath => {
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
const extensionName = path.basename(extensionPath);
|
const extensionName = path.basename(extensionPath);
|
||||||
return { name: extensionName, path: extensionPath };
|
return { name: extensionName, path: extensionPath };
|
||||||
})
|
})
|
||||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
.filter(({ name }) => externalExtensions.indexOf(name) >= 0);
|
||||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
|
||||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
||||||
const visxDirectory = path.join(path.dirname(root), 'vsix');
|
return fromLocal(extension.path)
|
||||||
try {
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
if (!fs.existsSync(visxDirectory)) {
|
|
||||||
fs.mkdirSync(visxDirectory);
|
|
||||||
}
|
|
||||||
} catch (err) {
|
|
||||||
// don't fail the build if the output directory already exists
|
|
||||||
console.warn(err);
|
|
||||||
}
|
|
||||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
|
||||||
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
|
|
||||||
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
|
||||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
|
||||||
vsce.createVSIX({
|
|
||||||
cwd: element.path,
|
|
||||||
packagePath: packagePath,
|
|
||||||
useYarn: true
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
|
return es.merge(builtExtensions);
|
||||||
|
}
|
||||||
|
// {{SQL CARBON EDIT}} - End
|
||||||
|
|
||||||
|
export function cleanRebuildExtensions(root: string): Promise<void> {
|
||||||
|
return Promise.all(rebuildExtensions.map(async e => {
|
||||||
|
await util2.rimraf(path.join(root, e))();
|
||||||
|
})).then();
|
||||||
}
|
}
|
||||||
|
|
||||||
export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
|
export function packageRebuildExtensionsStream(): NodeJS.ReadWriteStream {
|
||||||
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
const extenalExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
|
||||||
if (platform === 'darwin') {
|
.map(manifestPath => {
|
||||||
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
|
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||||
} else {
|
const extensionName = path.basename(extensionPath);
|
||||||
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
|
return { name: extensionName, path: extensionPath };
|
||||||
}
|
})
|
||||||
|
.filter(({ name }) => rebuildExtensions.indexOf(name) >= 0);
|
||||||
|
|
||||||
platform = platform || process.platform;
|
const builtExtensions = extenalExtensionDescriptions.map(extension => {
|
||||||
|
return fromLocal(extension.path)
|
||||||
|
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||||
|
});
|
||||||
|
|
||||||
return () => {
|
return es.merge(builtExtensions);
|
||||||
const root = path.resolve(path.join(__dirname, '../..'));
|
|
||||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
|
||||||
.map(manifestPath => {
|
|
||||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
|
||||||
const extensionName = path.basename(extensionPath);
|
|
||||||
return { name: extensionName, path: extensionPath };
|
|
||||||
})
|
|
||||||
.filter(({ name }) => extensionName === name);
|
|
||||||
|
|
||||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
|
||||||
return fromLocal(extension.path);
|
|
||||||
}));
|
|
||||||
|
|
||||||
let result = localExtensions
|
|
||||||
.pipe(util2.skipDirectories())
|
|
||||||
.pipe(util2.fixWin32DirectoryPermissions())
|
|
||||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
|
||||||
|
|
||||||
return result.pipe(vfs.dest(destination));
|
|
||||||
};
|
|
||||||
}
|
}
|
||||||
// {{SQL CARBON EDIT}} - End
|
|
||||||
@@ -176,6 +176,7 @@ class XLF {
|
|||||||
this.buffer.push(line.toString());
|
this.buffer.push(line.toString());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
exports.XLF = XLF;
|
||||||
XLF.parsePseudo = function (xlfString) {
|
XLF.parsePseudo = function (xlfString) {
|
||||||
return new Promise((resolve) => {
|
return new Promise((resolve) => {
|
||||||
let parser = new xml2js.Parser();
|
let parser = new xml2js.Parser();
|
||||||
@@ -248,7 +249,6 @@ XLF.parse = function (xlfString) {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
exports.XLF = XLF;
|
|
||||||
class Limiter {
|
class Limiter {
|
||||||
constructor(maxDegreeOfParalellism) {
|
constructor(maxDegreeOfParalellism) {
|
||||||
this.maxDegreeOfParalellism = maxDegreeOfParalellism;
|
this.maxDegreeOfParalellism = maxDegreeOfParalellism;
|
||||||
@@ -586,7 +586,7 @@ function createXlfFilesForExtensions() {
|
|||||||
}
|
}
|
||||||
return _xlf;
|
return _xlf;
|
||||||
}
|
}
|
||||||
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
|
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
|
||||||
if (file.isBuffer()) {
|
if (file.isBuffer()) {
|
||||||
const buffer = file.contents;
|
const buffer = file.contents;
|
||||||
const basename = path.basename(file.path);
|
const basename = path.basename(file.path);
|
||||||
@@ -609,7 +609,7 @@ function createXlfFilesForExtensions() {
|
|||||||
}
|
}
|
||||||
else if (basename === 'nls.metadata.json') {
|
else if (basename === 'nls.metadata.json') {
|
||||||
const json = JSON.parse(buffer.toString('utf8'));
|
const json = JSON.parse(buffer.toString('utf8'));
|
||||||
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
|
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
|
||||||
for (let file in json) {
|
for (let file in json) {
|
||||||
const fileContent = json[file];
|
const fileContent = json[file];
|
||||||
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
|
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
|
||||||
@@ -912,8 +912,8 @@ function pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language
|
|||||||
_coreAndExtensionResources.push(...json.workbench);
|
_coreAndExtensionResources.push(...json.workbench);
|
||||||
// extensions
|
// extensions
|
||||||
let extensionsToLocalize = Object.create(null);
|
let extensionsToLocalize = Object.create(null);
|
||||||
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
||||||
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
||||||
Object.keys(extensionsToLocalize).forEach(extension => {
|
Object.keys(extensionsToLocalize).forEach(extension => {
|
||||||
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
|
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
|
||||||
});
|
});
|
||||||
@@ -1086,7 +1086,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
|
|||||||
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
|
resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });
|
||||||
this.queue(translatedMainFile);
|
this.queue(translatedMainFile);
|
||||||
for (let extension in extensionsPacks) {
|
for (let extension in extensionsPacks) {
|
||||||
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
|
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
|
||||||
this.queue(translatedExtFile);
|
this.queue(translatedExtFile);
|
||||||
const externalExtensionId = externalExtensions[extension];
|
const externalExtensionId = externalExtensions[extension];
|
||||||
if (externalExtensionId) {
|
if (externalExtensionId) {
|
||||||
|
|||||||
@@ -42,6 +42,10 @@
|
|||||||
"name": "vs/workbench/contrib/callHierarchy",
|
"name": "vs/workbench/contrib/callHierarchy",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/contrib/codeActions",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/comments",
|
"name": "vs/workbench/contrib/comments",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -106,6 +110,10 @@
|
|||||||
"name": "vs/workbench/contrib/quickopen",
|
"name": "vs/workbench/contrib/quickopen",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/contrib/userData",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/remote",
|
"name": "vs/workbench/contrib/remote",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -131,7 +139,7 @@
|
|||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/stats",
|
"name": "vs/workbench/contrib/tags",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -170,6 +178,10 @@
|
|||||||
"name": "vs/workbench/contrib/webview",
|
"name": "vs/workbench/contrib/webview",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/contrib/customEditor",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/contrib/welcome",
|
"name": "vs/workbench/contrib/welcome",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -178,10 +190,18 @@
|
|||||||
"name": "vs/workbench/contrib/outline",
|
"name": "vs/workbench/contrib/outline",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/contrib/userDataSync",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/actions",
|
"name": "vs/workbench/services/actions",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/services/authToken",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/bulkEdit",
|
"name": "vs/workbench/services/bulkEdit",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -230,6 +250,10 @@
|
|||||||
"name": "vs/workbench/services/keybinding",
|
"name": "vs/workbench/services/keybinding",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/services/lifecycle",
|
||||||
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/mode",
|
"name": "vs/workbench/services/mode",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
@@ -255,7 +279,7 @@
|
|||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "vs/workbench/services/workspace",
|
"name": "vs/workbench/services/workspaces",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@@ -273,6 +297,10 @@
|
|||||||
{
|
{
|
||||||
"name": "vs/workbench/services/notification",
|
"name": "vs/workbench/services/notification",
|
||||||
"project": "vscode-workbench"
|
"project": "vscode-workbench"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "vs/workbench/services/userData",
|
||||||
|
"project": "vscode-workbench"
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -709,7 +709,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
|
|||||||
}
|
}
|
||||||
return _xlf;
|
return _xlf;
|
||||||
}
|
}
|
||||||
gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
|
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
|
||||||
if (file.isBuffer()) {
|
if (file.isBuffer()) {
|
||||||
const buffer: Buffer = file.contents as Buffer;
|
const buffer: Buffer = file.contents as Buffer;
|
||||||
const basename = path.basename(file.path);
|
const basename = path.basename(file.path);
|
||||||
@@ -729,7 +729,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
|
|||||||
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
|
getXlf().addFile(`extensions/${extensionName}/package`, keys, messages);
|
||||||
} else if (basename === 'nls.metadata.json') {
|
} else if (basename === 'nls.metadata.json') {
|
||||||
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
|
const json: BundledExtensionFormat = JSON.parse(buffer.toString('utf8'));
|
||||||
const relPath = path.relative(`./extensions/${extensionName}`, path.dirname(file.path));
|
const relPath = path.relative(`.build/extensions/${extensionName}`, path.dirname(file.path));
|
||||||
for (let file in json) {
|
for (let file in json) {
|
||||||
const fileContent = json[file];
|
const fileContent = json[file];
|
||||||
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
|
getXlf().addFile(`extensions/${extensionName}/${relPath}/${file}`, fileContent.keys, fileContent.messages);
|
||||||
@@ -1053,8 +1053,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str
|
|||||||
|
|
||||||
// extensions
|
// extensions
|
||||||
let extensionsToLocalize = Object.create(null);
|
let extensionsToLocalize = Object.create(null);
|
||||||
glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
glob.sync('.build/extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
||||||
glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
glob.sync('.build/extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
|
||||||
|
|
||||||
Object.keys(extensionsToLocalize).forEach(extension => {
|
Object.keys(extensionsToLocalize).forEach(extension => {
|
||||||
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
|
_coreAndExtensionResources.push({ name: extension, project: extensionsProject });
|
||||||
@@ -1253,7 +1253,7 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
|
|||||||
|
|
||||||
this.queue(translatedMainFile);
|
this.queue(translatedMainFile);
|
||||||
for (let extension in extensionsPacks) {
|
for (let extension in extensionsPacks) {
|
||||||
const translatedExtFile = createI18nFile(`./extensions/${extension}`, extensionsPacks[extension]);
|
const translatedExtFile = createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
|
||||||
this.queue(translatedExtFile);
|
this.queue(translatedExtFile);
|
||||||
|
|
||||||
const externalExtensionId = externalExtensions[extension];
|
const externalExtensionId = externalExtensions[extension];
|
||||||
|
|||||||
@@ -5,6 +5,7 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
Object.defineProperty(exports, "__esModule", { value: true });
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
const es = require("event-stream");
|
const es = require("event-stream");
|
||||||
|
const fs = require("fs");
|
||||||
const gulp = require("gulp");
|
const gulp = require("gulp");
|
||||||
const concat = require("gulp-concat");
|
const concat = require("gulp-concat");
|
||||||
const minifyCSS = require("gulp-cssnano");
|
const minifyCSS = require("gulp-cssnano");
|
||||||
@@ -17,7 +18,7 @@ const fancyLog = require("fancy-log");
|
|||||||
const ansiColors = require("ansi-colors");
|
const ansiColors = require("ansi-colors");
|
||||||
const path = require("path");
|
const path = require("path");
|
||||||
const pump = require("pump");
|
const pump = require("pump");
|
||||||
const uglifyes = require("uglify-es");
|
const terser = require("terser");
|
||||||
const VinylFile = require("vinyl");
|
const VinylFile = require("vinyl");
|
||||||
const bundle = require("./bundle");
|
const bundle = require("./bundle");
|
||||||
const i18n_1 = require("./i18n");
|
const i18n_1 = require("./i18n");
|
||||||
@@ -134,6 +135,14 @@ function optimizeTask(opts) {
|
|||||||
if (err || !result) {
|
if (err || !result) {
|
||||||
return bundlesStream.emit('error', JSON.stringify(err));
|
return bundlesStream.emit('error', JSON.stringify(err));
|
||||||
}
|
}
|
||||||
|
if (opts.inlineAmdImages) {
|
||||||
|
try {
|
||||||
|
result = inlineAmdImages(src, result);
|
||||||
|
}
|
||||||
|
catch (err) {
|
||||||
|
return bundlesStream.emit('error', JSON.stringify(err));
|
||||||
|
}
|
||||||
|
}
|
||||||
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
||||||
// Remove css inlined resources
|
// Remove css inlined resources
|
||||||
const filteredResources = resources.slice();
|
const filteredResources = resources.slice();
|
||||||
@@ -169,6 +178,39 @@ function optimizeTask(opts) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
exports.optimizeTask = optimizeTask;
|
exports.optimizeTask = optimizeTask;
|
||||||
|
function inlineAmdImages(src, result) {
|
||||||
|
for (const outputFile of result.files) {
|
||||||
|
for (const sourceFile of outputFile.sources) {
|
||||||
|
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
|
||||||
|
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
|
||||||
|
let imagePath = m0;
|
||||||
|
// remove `` or ''
|
||||||
|
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|
||||||
|
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
|
||||||
|
imagePath = imagePath.substr(1, imagePath.length - 2);
|
||||||
|
}
|
||||||
|
if (!/\.(png|svg)$/.test(imagePath)) {
|
||||||
|
console.log(`original: ${_}`);
|
||||||
|
return _;
|
||||||
|
}
|
||||||
|
const repoLocation = path.join(src, imagePath);
|
||||||
|
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
|
||||||
|
if (!fs.existsSync(absoluteLocation)) {
|
||||||
|
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
|
||||||
|
console.log(message);
|
||||||
|
throw new Error(message);
|
||||||
|
}
|
||||||
|
const fileContents = fs.readFileSync(absoluteLocation);
|
||||||
|
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
|
||||||
|
// Mark the file as inlined so we don't ship it by itself
|
||||||
|
result.cssInlinedResources.push(repoLocation);
|
||||||
|
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
/**
|
/**
|
||||||
* Wrap around uglify and allow the preserveComments function
|
* Wrap around uglify and allow the preserveComments function
|
||||||
* to have a file "context" to include our copyright only once per file.
|
* to have a file "context" to include our copyright only once per file.
|
||||||
@@ -199,7 +241,7 @@ function uglifyWithCopyrights() {
|
|||||||
return false;
|
return false;
|
||||||
};
|
};
|
||||||
};
|
};
|
||||||
const minify = composer(uglifyes);
|
const minify = composer(terser);
|
||||||
const input = es.through();
|
const input = es.through();
|
||||||
const output = input
|
const output = input
|
||||||
.pipe(flatmap((stream, f) => {
|
.pipe(flatmap((stream, f) => {
|
||||||
|
|||||||
@@ -6,6 +6,7 @@
|
|||||||
'use strict';
|
'use strict';
|
||||||
|
|
||||||
import * as es from 'event-stream';
|
import * as es from 'event-stream';
|
||||||
|
import * as fs from 'fs';
|
||||||
import * as gulp from 'gulp';
|
import * as gulp from 'gulp';
|
||||||
import * as concat from 'gulp-concat';
|
import * as concat from 'gulp-concat';
|
||||||
import * as minifyCSS from 'gulp-cssnano';
|
import * as minifyCSS from 'gulp-cssnano';
|
||||||
@@ -19,7 +20,7 @@ import * as ansiColors from 'ansi-colors';
|
|||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as pump from 'pump';
|
import * as pump from 'pump';
|
||||||
import * as sm from 'source-map';
|
import * as sm from 'source-map';
|
||||||
import * as uglifyes from 'uglify-es';
|
import * as terser from 'terser';
|
||||||
import * as VinylFile from 'vinyl';
|
import * as VinylFile from 'vinyl';
|
||||||
import * as bundle from './bundle';
|
import * as bundle from './bundle';
|
||||||
import { Language, processNlsFiles } from './i18n';
|
import { Language, processNlsFiles } from './i18n';
|
||||||
@@ -161,6 +162,10 @@ export interface IOptimizeTaskOpts {
|
|||||||
* (emit bundleInfo.json file)
|
* (emit bundleInfo.json file)
|
||||||
*/
|
*/
|
||||||
bundleInfo: boolean;
|
bundleInfo: boolean;
|
||||||
|
/**
|
||||||
|
* replace calls to `registerAndGetAmdImageURL` with data uris
|
||||||
|
*/
|
||||||
|
inlineAmdImages: boolean;
|
||||||
/**
|
/**
|
||||||
* (out folder name)
|
* (out folder name)
|
||||||
*/
|
*/
|
||||||
@@ -194,6 +199,14 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
|
|||||||
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
|
bundle.bundle(entryPoints, loaderConfig, function (err, result) {
|
||||||
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
|
if (err || !result) { return bundlesStream.emit('error', JSON.stringify(err)); }
|
||||||
|
|
||||||
|
if (opts.inlineAmdImages) {
|
||||||
|
try {
|
||||||
|
result = inlineAmdImages(src, result);
|
||||||
|
} catch (err) {
|
||||||
|
return bundlesStream.emit('error', JSON.stringify(err));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
toBundleStream(src, bundledFileHeader, result.files).pipe(bundlesStream);
|
||||||
|
|
||||||
// Remove css inlined resources
|
// Remove css inlined resources
|
||||||
@@ -238,6 +251,42 @@ export function optimizeTask(opts: IOptimizeTaskOpts): () => NodeJS.ReadWriteStr
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function inlineAmdImages(src: string, result: bundle.IBundleResult): bundle.IBundleResult {
|
||||||
|
for (const outputFile of result.files) {
|
||||||
|
for (const sourceFile of outputFile.sources) {
|
||||||
|
if (sourceFile.path && /\.js$/.test(sourceFile.path)) {
|
||||||
|
sourceFile.contents = sourceFile.contents.replace(/\([^.]+\.registerAndGetAmdImageURL\(([^)]+)\)\)/g, (_, m0) => {
|
||||||
|
let imagePath = m0;
|
||||||
|
// remove `` or ''
|
||||||
|
if ((imagePath.charAt(0) === '`' && imagePath.charAt(imagePath.length - 1) === '`')
|
||||||
|
|| (imagePath.charAt(0) === '\'' && imagePath.charAt(imagePath.length - 1) === '\'')) {
|
||||||
|
imagePath = imagePath.substr(1, imagePath.length - 2);
|
||||||
|
}
|
||||||
|
if (!/\.(png|svg)$/.test(imagePath)) {
|
||||||
|
console.log(`original: ${_}`);
|
||||||
|
return _;
|
||||||
|
}
|
||||||
|
const repoLocation = path.join(src, imagePath);
|
||||||
|
const absoluteLocation = path.join(REPO_ROOT_PATH, repoLocation);
|
||||||
|
if (!fs.existsSync(absoluteLocation)) {
|
||||||
|
const message = `Invalid amd image url in file ${sourceFile.path}: ${imagePath}`;
|
||||||
|
console.log(message);
|
||||||
|
throw new Error(message);
|
||||||
|
}
|
||||||
|
const fileContents = fs.readFileSync(absoluteLocation);
|
||||||
|
const mime = /\.svg$/.test(imagePath) ? 'image/svg+xml' : 'image/png';
|
||||||
|
|
||||||
|
// Mark the file as inlined so we don't ship it by itself
|
||||||
|
result.cssInlinedResources.push(repoLocation);
|
||||||
|
|
||||||
|
return `("data:${mime};base64,${fileContents.toString('base64')}")`;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
declare class FileWithCopyright extends VinylFile {
|
declare class FileWithCopyright extends VinylFile {
|
||||||
public __hasOurCopyright: boolean;
|
public __hasOurCopyright: boolean;
|
||||||
}
|
}
|
||||||
@@ -275,7 +324,7 @@ function uglifyWithCopyrights(): NodeJS.ReadWriteStream {
|
|||||||
};
|
};
|
||||||
};
|
};
|
||||||
|
|
||||||
const minify = (composer as any)(uglifyes);
|
const minify = (composer as any)(terser);
|
||||||
const input = es.through();
|
const input = es.through();
|
||||||
const output = input
|
const output = input
|
||||||
.pipe(flatmap((stream, f) => {
|
.pipe(flatmap((stream, f) => {
|
||||||
|
|||||||
103
build/lib/rollup.js
Normal file
103
build/lib/rollup.js
Normal file
@@ -0,0 +1,103 @@
|
|||||||
|
"use strict";
|
||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
Object.defineProperty(exports, "__esModule", { value: true });
|
||||||
|
const fs = require("fs");
|
||||||
|
const rollup = require("rollup");
|
||||||
|
const path = require("path");
|
||||||
|
// getting around stupid import rules
|
||||||
|
const nodeResolve = require('rollup-plugin-node-resolve');
|
||||||
|
const commonjs = require('rollup-plugin-commonjs');
|
||||||
|
async function rollupModule(options) {
|
||||||
|
const moduleName = options.moduleName;
|
||||||
|
try {
|
||||||
|
const inputFile = options.inputFile;
|
||||||
|
const outputDirectory = options.outputDirectory;
|
||||||
|
await fs.promises.mkdir(outputDirectory, {
|
||||||
|
recursive: true
|
||||||
|
});
|
||||||
|
const outputFileName = options.outputFileName;
|
||||||
|
const outputMapName = `${outputFileName}.map`;
|
||||||
|
const external = options.external || [];
|
||||||
|
const outputFilePath = path.resolve(outputDirectory, outputFileName);
|
||||||
|
const outputMapPath = path.resolve(outputDirectory, outputMapName);
|
||||||
|
const bundle = await rollup.rollup({
|
||||||
|
input: inputFile,
|
||||||
|
plugins: [
|
||||||
|
nodeResolve(),
|
||||||
|
commonjs(),
|
||||||
|
],
|
||||||
|
external,
|
||||||
|
});
|
||||||
|
const generatedBundle = await bundle.generate({
|
||||||
|
name: moduleName,
|
||||||
|
format: 'umd',
|
||||||
|
sourcemap: true
|
||||||
|
});
|
||||||
|
const result = generatedBundle.output[0];
|
||||||
|
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
|
||||||
|
await fs.promises.writeFile(outputFilePath, result.code);
|
||||||
|
await fs.promises.writeFile(outputMapPath, result.map);
|
||||||
|
return {
|
||||||
|
name: moduleName,
|
||||||
|
result: true
|
||||||
|
};
|
||||||
|
}
|
||||||
|
catch (ex) {
|
||||||
|
return {
|
||||||
|
name: moduleName,
|
||||||
|
result: false,
|
||||||
|
exception: ex
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
function rollupAngularSlickgrid(root) {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
const result = await rollupModule({
|
||||||
|
moduleName: 'angular2-slickgrid',
|
||||||
|
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
|
||||||
|
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
|
||||||
|
outputFileName: 'angular2-slickgrid.umd.js'
|
||||||
|
});
|
||||||
|
if (!result.result) {
|
||||||
|
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
|
||||||
|
}
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
exports.rollupAngularSlickgrid = rollupAngularSlickgrid;
|
||||||
|
function rollupAngular(root) {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
|
||||||
|
const tasks = modules.map((module) => {
|
||||||
|
return rollupModule({
|
||||||
|
moduleName: `ng.${module}`,
|
||||||
|
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
|
||||||
|
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
|
||||||
|
outputFileName: `${module}.umd.js`,
|
||||||
|
external: modules.map(mn => `@angular/${mn}`)
|
||||||
|
});
|
||||||
|
});
|
||||||
|
// array of booleans
|
||||||
|
const x = await Promise.all(tasks);
|
||||||
|
const result = x.reduce((prev, current) => {
|
||||||
|
if (!current.result) {
|
||||||
|
prev.fails.push(current.name);
|
||||||
|
prev.exceptions.push(current.exception);
|
||||||
|
prev.result = false;
|
||||||
|
}
|
||||||
|
return prev;
|
||||||
|
}, {
|
||||||
|
fails: [],
|
||||||
|
exceptions: [],
|
||||||
|
result: true,
|
||||||
|
});
|
||||||
|
if (!result.result) {
|
||||||
|
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
|
||||||
|
}
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
exports.rollupAngular = rollupAngular;
|
||||||
125
build/lib/rollup.ts
Normal file
125
build/lib/rollup.ts
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
/*---------------------------------------------------------------------------------------------
|
||||||
|
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||||
|
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||||
|
*--------------------------------------------------------------------------------------------*/
|
||||||
|
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as rollup from 'rollup';
|
||||||
|
import * as path from 'path';
|
||||||
|
|
||||||
|
// getting around stupid import rules
|
||||||
|
const nodeResolve = require('rollup-plugin-node-resolve');
|
||||||
|
const commonjs = require('rollup-plugin-commonjs');
|
||||||
|
|
||||||
|
export interface IRollupOptions {
|
||||||
|
moduleName: string;
|
||||||
|
inputFile: string;
|
||||||
|
outputDirectory: string;
|
||||||
|
outputFileName: string;
|
||||||
|
external?: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
async function rollupModule(options: IRollupOptions) {
|
||||||
|
const moduleName = options.moduleName;
|
||||||
|
try {
|
||||||
|
const inputFile = options.inputFile;
|
||||||
|
const outputDirectory = options.outputDirectory;
|
||||||
|
|
||||||
|
await fs.promises.mkdir(outputDirectory, {
|
||||||
|
recursive: true
|
||||||
|
});
|
||||||
|
|
||||||
|
const outputFileName = options.outputFileName;
|
||||||
|
const outputMapName = `${outputFileName}.map`;
|
||||||
|
const external = options.external || [];
|
||||||
|
|
||||||
|
const outputFilePath = path.resolve(outputDirectory, outputFileName);
|
||||||
|
const outputMapPath = path.resolve(outputDirectory, outputMapName);
|
||||||
|
|
||||||
|
const bundle = await rollup.rollup({
|
||||||
|
input: inputFile,
|
||||||
|
plugins: [
|
||||||
|
nodeResolve(),
|
||||||
|
commonjs(),
|
||||||
|
],
|
||||||
|
external,
|
||||||
|
});
|
||||||
|
|
||||||
|
const generatedBundle = await bundle.generate({
|
||||||
|
name: moduleName,
|
||||||
|
format: 'umd',
|
||||||
|
sourcemap: true
|
||||||
|
});
|
||||||
|
|
||||||
|
const result = generatedBundle.output[0];
|
||||||
|
result.code = result.code + '\n//# sourceMappingURL=' + path.basename(outputMapName);
|
||||||
|
|
||||||
|
await fs.promises.writeFile(outputFilePath, result.code);
|
||||||
|
await fs.promises.writeFile(outputMapPath, result.map);
|
||||||
|
|
||||||
|
return {
|
||||||
|
name: moduleName,
|
||||||
|
result: true
|
||||||
|
};
|
||||||
|
} catch (ex) {
|
||||||
|
return {
|
||||||
|
name: moduleName,
|
||||||
|
result: false,
|
||||||
|
exception: ex
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function rollupAngularSlickgrid(root: string): Promise<void> {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
const result = await rollupModule({
|
||||||
|
moduleName: 'angular2-slickgrid',
|
||||||
|
inputFile: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'index.js'),
|
||||||
|
outputDirectory: path.resolve(root, 'node_modules', 'angular2-slickgrid', 'out', 'bundles'),
|
||||||
|
outputFileName: 'angular2-slickgrid.umd.js'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!result.result) {
|
||||||
|
return reject(`angular2-slickgrid failed to bundle - ${result.exception}`);
|
||||||
|
}
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export function rollupAngular(root: string): Promise<void> {
|
||||||
|
return new Promise(async (resolve, reject) => {
|
||||||
|
|
||||||
|
const modules = ['core', 'animations', 'common', 'compiler', 'forms', 'platform-browser', 'platform-browser-dynamic', 'router'];
|
||||||
|
const tasks = modules.map((module) => {
|
||||||
|
return rollupModule({
|
||||||
|
moduleName: `ng.${module}`,
|
||||||
|
inputFile: path.resolve(root, 'node_modules', '@angular', module, '@angular', `${module}.es5.js`),
|
||||||
|
outputDirectory: path.resolve(root, 'node_modules', '@angular', module, 'bundles'),
|
||||||
|
outputFileName: `${module}.umd.js`,
|
||||||
|
external: modules.map(mn => `@angular/${mn}`)
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// array of booleans
|
||||||
|
const x = await Promise.all(tasks);
|
||||||
|
|
||||||
|
const result = x.reduce<{ fails: string[]; exceptions: string[]; result: boolean }>((prev, current) => {
|
||||||
|
if (!current.result) {
|
||||||
|
prev.fails.push(current.name);
|
||||||
|
prev.exceptions.push(current.exception);
|
||||||
|
prev.result = false;
|
||||||
|
}
|
||||||
|
return prev;
|
||||||
|
}, {
|
||||||
|
fails: [],
|
||||||
|
exceptions: [],
|
||||||
|
result: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!result.result) {
|
||||||
|
return reject(`failures: ${result.fails} - exceptions: ${JSON.stringify(result.exceptions)}`);
|
||||||
|
}
|
||||||
|
resolve();
|
||||||
|
});
|
||||||
|
|
||||||
|
}
|
||||||
@@ -43,7 +43,9 @@ function extractEditor(options) {
|
|||||||
compilerOptions.declaration = false;
|
compilerOptions.declaration = false;
|
||||||
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
|
compilerOptions.moduleResolution = ts.ModuleResolutionKind.Classic;
|
||||||
options.compilerOptions = compilerOptions;
|
options.compilerOptions = compilerOptions;
|
||||||
console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
|
console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
|
||||||
|
// Take the extra included .d.ts files from `tsconfig.monaco.json`
|
||||||
|
options.typings = tsConfig.include.filter(includedFile => /\.d\.ts$/.test(includedFile));
|
||||||
let result = tss.shake(options);
|
let result = tss.shake(options);
|
||||||
for (let fileName in result) {
|
for (let fileName in result) {
|
||||||
if (result.hasOwnProperty(fileName)) {
|
if (result.hasOwnProperty(fileName)) {
|
||||||
@@ -130,7 +132,7 @@ function createESMSourcesAndResources2(options) {
|
|||||||
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
|
write(getDestAbsoluteFilePath(file), JSON.stringify(tsConfig, null, '\t'));
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
|
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
|
||||||
// Transport the files directly
|
// Transport the files directly
|
||||||
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
|
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
|
||||||
continue;
|
continue;
|
||||||
@@ -250,35 +252,37 @@ function transportCSS(module, enqueue, write) {
|
|||||||
const filename = path.join(SRC_DIR, module);
|
const filename = path.join(SRC_DIR, module);
|
||||||
const fileContents = fs.readFileSync(filename).toString();
|
const fileContents = fs.readFileSync(filename).toString();
|
||||||
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
|
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
|
||||||
const inlineResourcesLimit = 300000; //3000; // see https://github.com/Microsoft/monaco-editor/issues/336
|
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
|
||||||
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
|
|
||||||
write(module, newContents);
|
write(module, newContents);
|
||||||
return true;
|
return true;
|
||||||
function _rewriteOrInlineUrls(contents, forceBase64, inlineByteLimit) {
|
function _rewriteOrInlineUrls(contents, forceBase64) {
|
||||||
return _replaceURL(contents, (url) => {
|
return _replaceURL(contents, (url) => {
|
||||||
let imagePath = path.join(path.dirname(module), url);
|
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
|
||||||
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
|
if (fontMatch) {
|
||||||
if (fileContents.length < inlineByteLimit) {
|
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
|
||||||
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
|
const fontPath = path.join(path.dirname(module), relativeFontPath);
|
||||||
let DATA = ';base64,' + fileContents.toString('base64');
|
enqueue(fontPath);
|
||||||
if (!forceBase64 && /\.svg$/.test(url)) {
|
return relativeFontPath;
|
||||||
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
|
|
||||||
let newText = fileContents.toString()
|
|
||||||
.replace(/"/g, '\'')
|
|
||||||
.replace(/</g, '%3C')
|
|
||||||
.replace(/>/g, '%3E')
|
|
||||||
.replace(/&/g, '%26')
|
|
||||||
.replace(/#/g, '%23')
|
|
||||||
.replace(/\s+/g, ' ');
|
|
||||||
let encodedData = ',' + newText;
|
|
||||||
if (encodedData.length < DATA.length) {
|
|
||||||
DATA = encodedData;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return '"data:' + MIME + DATA + '"';
|
|
||||||
}
|
}
|
||||||
enqueue(imagePath);
|
const imagePath = path.join(path.dirname(module), url);
|
||||||
return url;
|
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
|
||||||
|
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
|
||||||
|
let DATA = ';base64,' + fileContents.toString('base64');
|
||||||
|
if (!forceBase64 && /\.svg$/.test(url)) {
|
||||||
|
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
|
||||||
|
let newText = fileContents.toString()
|
||||||
|
.replace(/"/g, '\'')
|
||||||
|
.replace(/</g, '%3C')
|
||||||
|
.replace(/>/g, '%3E')
|
||||||
|
.replace(/&/g, '%26')
|
||||||
|
.replace(/#/g, '%23')
|
||||||
|
.replace(/\s+/g, ' ');
|
||||||
|
let encodedData = ',' + newText;
|
||||||
|
if (encodedData.length < DATA.length) {
|
||||||
|
DATA = encodedData;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return '"data:' + MIME + DATA + '"';
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
function _replaceURL(contents, replacer) {
|
function _replaceURL(contents, replacer) {
|
||||||
|
|||||||
@@ -50,7 +50,10 @@ export function extractEditor(options: tss.ITreeShakingOptions & { destRoot: str
|
|||||||
|
|
||||||
options.compilerOptions = compilerOptions;
|
options.compilerOptions = compilerOptions;
|
||||||
|
|
||||||
console.log(`Running with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
|
console.log(`Running tree shaker with shakeLevel ${tss.toStringShakeLevel(options.shakeLevel)}`);
|
||||||
|
|
||||||
|
// Take the extra included .d.ts files from `tsconfig.monaco.json`
|
||||||
|
options.typings = (<string[]>tsConfig.include).filter(includedFile => /\.d\.ts$/.test(includedFile));
|
||||||
|
|
||||||
let result = tss.shake(options);
|
let result = tss.shake(options);
|
||||||
for (let fileName in result) {
|
for (let fileName in result) {
|
||||||
@@ -154,7 +157,7 @@ export function createESMSourcesAndResources2(options: IOptions2): void {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file)) {
|
if (/\.d\.ts$/.test(file) || /\.css$/.test(file) || /\.js$/.test(file) || /\.ttf$/.test(file)) {
|
||||||
// Transport the files directly
|
// Transport the files directly
|
||||||
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
|
write(getDestAbsoluteFilePath(file), fs.readFileSync(path.join(SRC_FOLDER, file)));
|
||||||
continue;
|
continue;
|
||||||
@@ -290,40 +293,41 @@ function transportCSS(module: string, enqueue: (module: string) => void, write:
|
|||||||
const filename = path.join(SRC_DIR, module);
|
const filename = path.join(SRC_DIR, module);
|
||||||
const fileContents = fs.readFileSync(filename).toString();
|
const fileContents = fs.readFileSync(filename).toString();
|
||||||
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
|
const inlineResources = 'base64'; // see https://github.com/Microsoft/monaco-editor/issues/148
|
||||||
const inlineResourcesLimit = 300000;//3000; // see https://github.com/Microsoft/monaco-editor/issues/336
|
|
||||||
|
|
||||||
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64', inlineResourcesLimit);
|
const newContents = _rewriteOrInlineUrls(fileContents, inlineResources === 'base64');
|
||||||
write(module, newContents);
|
write(module, newContents);
|
||||||
return true;
|
return true;
|
||||||
|
|
||||||
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean, inlineByteLimit: number): string {
|
function _rewriteOrInlineUrls(contents: string, forceBase64: boolean): string {
|
||||||
return _replaceURL(contents, (url) => {
|
return _replaceURL(contents, (url) => {
|
||||||
let imagePath = path.join(path.dirname(module), url);
|
const fontMatch = url.match(/^(.*).ttf\?(.*)$/);
|
||||||
let fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
|
if (fontMatch) {
|
||||||
|
const relativeFontPath = `${fontMatch[1]}.ttf`; // trim the query parameter
|
||||||
if (fileContents.length < inlineByteLimit) {
|
const fontPath = path.join(path.dirname(module), relativeFontPath);
|
||||||
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
|
enqueue(fontPath);
|
||||||
let DATA = ';base64,' + fileContents.toString('base64');
|
return relativeFontPath;
|
||||||
|
|
||||||
if (!forceBase64 && /\.svg$/.test(url)) {
|
|
||||||
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
|
|
||||||
let newText = fileContents.toString()
|
|
||||||
.replace(/"/g, '\'')
|
|
||||||
.replace(/</g, '%3C')
|
|
||||||
.replace(/>/g, '%3E')
|
|
||||||
.replace(/&/g, '%26')
|
|
||||||
.replace(/#/g, '%23')
|
|
||||||
.replace(/\s+/g, ' ');
|
|
||||||
let encodedData = ',' + newText;
|
|
||||||
if (encodedData.length < DATA.length) {
|
|
||||||
DATA = encodedData;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return '"data:' + MIME + DATA + '"';
|
|
||||||
}
|
}
|
||||||
|
|
||||||
enqueue(imagePath);
|
const imagePath = path.join(path.dirname(module), url);
|
||||||
return url;
|
const fileContents = fs.readFileSync(path.join(SRC_DIR, imagePath));
|
||||||
|
const MIME = /\.svg$/.test(url) ? 'image/svg+xml' : 'image/png';
|
||||||
|
let DATA = ';base64,' + fileContents.toString('base64');
|
||||||
|
|
||||||
|
if (!forceBase64 && /\.svg$/.test(url)) {
|
||||||
|
// .svg => url encode as explained at https://codepen.io/tigt/post/optimizing-svgs-in-data-uris
|
||||||
|
let newText = fileContents.toString()
|
||||||
|
.replace(/"/g, '\'')
|
||||||
|
.replace(/</g, '%3C')
|
||||||
|
.replace(/>/g, '%3E')
|
||||||
|
.replace(/&/g, '%26')
|
||||||
|
.replace(/#/g, '%23')
|
||||||
|
.replace(/\s+/g, ' ');
|
||||||
|
let encodedData = ',' + newText;
|
||||||
|
if (encodedData.length < DATA.length) {
|
||||||
|
DATA = encodedData;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return '"data:' + MIME + DATA + '"';
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -116,7 +116,7 @@ function submitAllStats(productJson, commit) {
|
|||||||
}
|
}
|
||||||
*/
|
*/
|
||||||
appInsights.defaultClient.trackEvent({
|
appInsights.defaultClient.trackEvent({
|
||||||
name: 'monacoworkbench/packagemetrics',
|
name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`,
|
||||||
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
|
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
|
||||||
});
|
});
|
||||||
appInsights.defaultClient.flush({
|
appInsights.defaultClient.flush({
|
||||||
|
|||||||
@@ -126,7 +126,7 @@ export function submitAllStats(productJson: any, commit: string): Promise<boolea
|
|||||||
}
|
}
|
||||||
*/
|
*/
|
||||||
appInsights.defaultClient.trackEvent({
|
appInsights.defaultClient.trackEvent({
|
||||||
name: 'monacoworkbench/packagemetrics',
|
name: `${productJson.quality !== 'stable' ? 'adsworkbench' : 'monacoworkbench'}/packagemetrics`, // {{SQL CARBON EDIT}}
|
||||||
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
|
properties: { commit, size: JSON.stringify(sizes), count: JSON.stringify(counts) }
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -25,17 +25,17 @@ function toStringShakeLevel(shakeLevel) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
exports.toStringShakeLevel = toStringShakeLevel;
|
exports.toStringShakeLevel = toStringShakeLevel;
|
||||||
function printDiagnostics(diagnostics) {
|
function printDiagnostics(options, diagnostics) {
|
||||||
for (const diag of diagnostics) {
|
for (const diag of diagnostics) {
|
||||||
let result = '';
|
let result = '';
|
||||||
if (diag.file) {
|
if (diag.file) {
|
||||||
result += `${diag.file.fileName}: `;
|
result += `${path.join(options.sourcesRoot, diag.file.fileName)}`;
|
||||||
}
|
}
|
||||||
if (diag.file && diag.start) {
|
if (diag.file && diag.start) {
|
||||||
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
|
let location = diag.file.getLineAndCharacterOfPosition(diag.start);
|
||||||
result += `- ${location.line + 1},${location.character} - `;
|
result += `:${location.line + 1}:${location.character}`;
|
||||||
}
|
}
|
||||||
result += JSON.stringify(diag.messageText);
|
result += ` - ` + JSON.stringify(diag.messageText);
|
||||||
console.log(result);
|
console.log(result);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -44,17 +44,17 @@ function shake(options) {
|
|||||||
const program = languageService.getProgram();
|
const program = languageService.getProgram();
|
||||||
const globalDiagnostics = program.getGlobalDiagnostics();
|
const globalDiagnostics = program.getGlobalDiagnostics();
|
||||||
if (globalDiagnostics.length > 0) {
|
if (globalDiagnostics.length > 0) {
|
||||||
printDiagnostics(globalDiagnostics);
|
printDiagnostics(options, globalDiagnostics);
|
||||||
throw new Error(`Compilation Errors encountered.`);
|
throw new Error(`Compilation Errors encountered.`);
|
||||||
}
|
}
|
||||||
const syntacticDiagnostics = program.getSyntacticDiagnostics();
|
const syntacticDiagnostics = program.getSyntacticDiagnostics();
|
||||||
if (syntacticDiagnostics.length > 0) {
|
if (syntacticDiagnostics.length > 0) {
|
||||||
printDiagnostics(syntacticDiagnostics);
|
printDiagnostics(options, syntacticDiagnostics);
|
||||||
throw new Error(`Compilation Errors encountered.`);
|
throw new Error(`Compilation Errors encountered.`);
|
||||||
}
|
}
|
||||||
const semanticDiagnostics = program.getSemanticDiagnostics();
|
const semanticDiagnostics = program.getSemanticDiagnostics();
|
||||||
if (semanticDiagnostics.length > 0) {
|
if (semanticDiagnostics.length > 0) {
|
||||||
printDiagnostics(semanticDiagnostics);
|
printDiagnostics(options, semanticDiagnostics);
|
||||||
throw new Error(`Compilation Errors encountered.`);
|
throw new Error(`Compilation Errors encountered.`);
|
||||||
}
|
}
|
||||||
markNodes(languageService, options);
|
markNodes(languageService, options);
|
||||||
@@ -358,7 +358,7 @@ function markNodes(languageService, options) {
|
|||||||
++step;
|
++step;
|
||||||
let node;
|
let node;
|
||||||
if (step % 100 === 0) {
|
if (step % 100 === 0) {
|
||||||
console.log(`${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
|
console.log(`Treeshaking - ${Math.floor(100 * step / (step + black_queue.length + gray_queue.length))}% - ${step}/${step + black_queue.length + gray_queue.length} (${black_queue.length}, ${gray_queue.length})`);
|
||||||
}
|
}
|
||||||
if (black_queue.length === 0) {
|
if (black_queue.length === 0) {
|
||||||
for (let i = 0; i < gray_queue.length; i++) {
|
for (let i = 0; i < gray_queue.length; i++) {
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user