Mirror of https://github.com/ckaczor/azuredatastudio.git
synced 2026-02-17 11:01:37 -05:00
Compare commits
791 Commits
16  .eslintignore  Normal file
@@ -0,0 +1,16 @@
**/vs/nls.build.js
**/vs/nls.js
**/vs/css.build.js
**/vs/css.js
**/vs/loader.js
**/promise-polyfill/**
**/insane/**
**/marked/**
**/test/**/*.js
**/node_modules/**
**/vscode-api-tests/testWorkspace/**
**/vscode-api-tests/testWorkspace2/**
**/extensions/**/out/**
**/extensions/**/build/**
**/extensions/markdown-language-features/media/**
**/extensions/typescript-basics/test/colorize-fixtures/**
289  .eslintrc.json
@@ -1,20 +1,273 @@
{
|
||||
"root": true,
|
||||
"env": {
|
||||
"node": true,
|
||||
"es6": true
|
||||
},
|
||||
"rules": {
|
||||
"no-console": 0,
|
||||
"no-cond-assign": 0,
|
||||
"no-unused-vars": 1,
|
||||
"no-extra-semi": "warn",
|
||||
"semi": "warn"
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"parserOptions": {
|
||||
"ecmaFeatures": {
|
||||
"experimentalObjectRestSpread": true
|
||||
}
|
||||
}
|
||||
"root": true,
|
||||
"parser": "@typescript-eslint/parser",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 6,
|
||||
"sourceType": "module"
|
||||
},
|
||||
"plugins": [
|
||||
"@typescript-eslint",
|
||||
"jsdoc"
|
||||
],
|
||||
"rules": {
|
||||
"constructor-super": "warn",
|
||||
"curly": "warn",
|
||||
"eqeqeq": "warn",
|
||||
"no-buffer-constructor": "warn",
|
||||
"no-caller": "warn",
|
||||
"no-debugger": "warn",
|
||||
"no-duplicate-case": "warn",
|
||||
"no-duplicate-imports": "warn",
|
||||
"no-eval": "warn",
|
||||
"no-extra-semi": "warn",
|
||||
"no-new-wrappers": "warn",
|
||||
"no-redeclare": "off",
|
||||
"no-sparse-arrays": "warn",
|
||||
"no-throw-literal": "warn",
|
||||
"no-unsafe-finally": "warn",
|
||||
"no-unused-labels": "warn",
|
||||
"no-restricted-globals": ["warn", "name", "length", "event", "closed", "external", "status", "origin", "orientation"], // non-complete list of globals that are easy to access unintentionally
|
||||
"no-var": "warn",
|
||||
"jsdoc/no-types": "warn",
|
||||
"semi": "off",
|
||||
"@typescript-eslint/semi": "warn",
|
||||
"@typescript-eslint/class-name-casing": "warn",
|
||||
"code-no-unused-expressions": [
|
||||
"warn",
|
||||
{
|
||||
"allowTernary": true
|
||||
}
|
||||
],
|
||||
"code-translation-remind": "warn",
|
||||
"code-no-nls-in-standalone-editor": "warn",
|
||||
"code-no-standalone-editor": "warn",
|
||||
"code-no-unexternalized-strings": "warn",
|
||||
"code-layering": [
|
||||
"warn",
|
||||
{
|
||||
"common": ["browser"], // {{SQL CARBON EDIT}} @anthonydresser not ideal, but for our purposes its fine for now,
|
||||
"node": [
|
||||
"common"
|
||||
],
|
||||
"browser": [
|
||||
"common"
|
||||
],
|
||||
"electron-main": [
|
||||
"common",
|
||||
"node"
|
||||
],
|
||||
"electron-browser": [
|
||||
"common",
|
||||
"browser",
|
||||
"node"
|
||||
]
|
||||
}
|
||||
],
|
||||
"code-import-patterns": [
|
||||
"warn",
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
// !!! Do not relax these rules !!!
|
||||
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
|
||||
{
|
||||
"target": "**/{vs,sql}/base/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/test/common/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/test/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
// vs/base/test/browser contains tests for vs/base/browser
|
||||
"target": "**/{vs,sql}/base/test/browser/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/test/{common,browser}/**",
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node}/**",
|
||||
"!path" // node modules (except path where we have our own impl)
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/electron-browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-browser}/**",
|
||||
"!path", // node modules (except path where we have our own impl)
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/base/parts/*/electron-main/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser,node,electron-main}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-main}/**",
|
||||
"!path", // node modules (except path where we have our own impl)
|
||||
"@angular/*",
|
||||
"rxjs/*"
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/common/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/base/parts/*/common/**",
|
||||
"**/{vs,sql}/{platform,workbench}/**/common/**",
|
||||
"**/vs/editor/common/**",
|
||||
"azdata" // TODO remove
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/test/common/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/common/**",
|
||||
"**/{vs,sql}/{platform,workbench}/**/common/**",
|
||||
"**/{vs,sql}/{base,platform,workbench}/**/test/common/**",
|
||||
"typemoq",
|
||||
"azdata" // TODO remove
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser}/**",
|
||||
"**/{vs,sql}/{platform,workbench}/**/{common,browser}/**",
|
||||
"**/vs/editor/{common,browser}/**",
|
||||
"@angular/*",
|
||||
"rxjs/*",
|
||||
"azdata" // TODO remove
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/node/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node}/**",
|
||||
"**/{vs,sql}/{platform,workbench}/**/{common,browser,node}/**",
|
||||
"**/vs/editor/{common,browser,node}/**",
|
||||
"!path", // node modules (except path where we have our own impl)
|
||||
"azdata" // TODO remove
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/electron-browser/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"vs/css!./**/*",
|
||||
"**/{vs,sql}/base/{common,browser,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/{platform,workbench}/**/{common,browser,node,electron-browser}/**",
|
||||
"**/vs/editor/{common,browser,node,electron-browser}/**",
|
||||
"!path", // node modules (except path where we have our own impl)
|
||||
"@angular/*",
|
||||
"rxjs/*",
|
||||
"azdata" // TODO remove
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/electron-main/**",
|
||||
"restrictions": [
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser,node}/**",
|
||||
"**/{vs,sql}/base/parts/*/{common,browser,node,electron-browser}/**",
|
||||
"**/{vs,sql}/{platform,workbench}/**/{common,browser,node,electron-main}/**",
|
||||
"**/vs/editor/{common,browser,node,electron-main}/**",
|
||||
"!path", // node modules (except path where we have our own impl)
|
||||
"azdata" // TODO remove
|
||||
]
|
||||
},
|
||||
{
|
||||
"target": "**/{vs,sql}/platform/*/test/browser/**",
|
||||
"restrictions": [
|
||||
"assert",
|
||||
"sinon",
|
||||
"vs/nls",
|
||||
"**/{vs,sql}/base/{common,browser}/**",
|
||||
"**/{vs,sql}/{platform,workbench}/**/{common,browser}/**",
|
||||
"**/{vs,sql}/{base,platform,workbench}/**/test/{common,browser}/**",
|
||||
"typemoq",
|
||||
"@angular/*",
|
||||
"rxjs/*",
|
||||
"azdata" // TODO remove
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"files": [
|
||||
"*.js"
|
||||
],
|
||||
"rules": {
|
||||
"jsdoc/no-types": "off"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
7  .eslintrc.sql.json  Normal file
@@ -0,0 +1,7 @@
{
  "extends": ".eslintrc.json",
  "rules": {
    "no-sync": "warn",
    "strict": ["warn", "never"]
  }
}
4  .github/ISSUE_TEMPLATE/bug_report.md  vendored
@@ -2,11 +2,11 @@
name: Bug report
about: Create a report to help us improve
title: ''
labels: Bug
labels: ''
assignees: ''

---

<!-- Please read our Rules of Conduct: https://opensource.microsoft.com/codeofconduct/ -->
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->
1  .github/ISSUE_TEMPLATE/config.yml  vendored  Normal file
@@ -0,0 +1 @@
blank_issues_enabled: false
2  .github/ISSUE_TEMPLATE/feature_request.md  vendored
@@ -2,7 +2,7 @@
name: Feature request
about: Suggest an idea for this project
title: ''
labels: Enhancement
labels: ''
assignees: ''

---
73  .github/classifier.yml  vendored
@@ -1,49 +1,36 @@
{
  perform: false,
  perform: true,
  alwaysRequireAssignee: false,
  labelsRequiringAssignee: [],
  defaultLabel: 'Triage: Needed',
  defaultAssignee: '',
  autoAssignees: {
    accessibility: [],
    acquisition: [],
    agent: [],
    azure: [],
    backup: [],
    bcdr: [],
    'chart viewer': [],
    connection: [],
    dacfx: [],
    dashboard: [],
    'data explorer': [],
    documentation: [],
    'edit data': [],
    export: [],
    extensibility: [],
    extensionManager: [],
    globalization: [],
    grid: [],
    import: [],
    insights: [],
    intellisense: [],
    localization: [],
    'managed instance': [],
    notebooks: [],
    'object explorer': [],
    performance: [],
    profiler: [],
    'query editor': [],
    'query execution': [],
    reliability: [],
    restore: [],
    scripting: [],
    'server group': [],
    settings: [],
    setup: [],
    shell: [],
    showplan: [],
    snippet: [],
    sql2019Preview: [],
    sqldw: [],
    supportability: [],
    ux: []
    Area - Acquisition: [],
    Area - Azure: [],
    Area - Backup\Restore: [],
    Area - Charting\Insights: [],
    Area - Connection: [ charles-gagnon ],
    Area - DacFX: [],
    Area - Dashboard: [],
    Area - Data Explorer: [],
    Area - Edit Data: [],
    Area - Extensibility: [],
    Area - External Table: [],
    Area - Fundamentals: [],
    Area - Language Service: [ charles-gagnon ],
    Area - Localization: [],
    Area - Notebooks: [ chlafreniere ],
    Area - Performance: [],
    Area - Query Editor: [ anthonydresser ],
    Area - Query Plan: [],
    Area - Reliability: [],
    Area - Resource Deployment: [],
    Area - Schema Compare: [],
    Area - Shell: [],
    Area - SQL Agent: [],
    Area - SQL Import: [],
    Area - SQL Profiler: [],
    Area - SQL 2019: [],
    Area - SSMS Integration: []
  }
}
34  .github/feature-requests.yml  vendored  Normal file
@@ -0,0 +1,34 @@
{
  typeLabel: {
    name: 'feature-request'
  },
  candidateMilestone: {
    number: 107,
    name: 'Backlog Candidates'
  },
  approvedMilestone: {
    number: 8,
    name: 'Backlog'
  },
  onLabeled: {
    delay: 60,
    perform: true
  },
  onCandidateMilestoned: {
    candidatesComment: "This feature request is now a candidate for our backlog. The community has 60 days to upvote the issue. If it receives 20 upvotes we will move it to our backlog. If not, we will close it. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
    perform: true
  },
  onMonitorUpvotes: {
    upvoteThreshold: 20,
    acceptanceComment: ":slightly_smiling_face: This feature request received a sufficient number of community upvotes and we moved it to our backlog. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
    perform: true
  },
  onMonitorDaysOnCandidateMilestone: {
    daysOnMilestone: 60,
    warningPeriod: 10,
    numberOfCommentsToPreventAutomaticRejection: 20,
    rejectionComment: ":slightly_frowning_face: In the last 60 days, this feature request has received less than 20 community upvotes and we closed it. Still a big Thank You to you for taking the time to create this issue! To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding!",
    warningComment: "This feature request has not yet received the 20 community upvotes it takes to make to our backlog. 10 days to go. To learn more about how we handle feature requests, please see our [documentation](https://aka.ms/vscode-issue-lifecycle).\n\nHappy Coding",
    perform: true
  }
}
2  .github/locker.yml  vendored
@@ -1,6 +1,6 @@
{
  daysAfterClose: 45,
  daysSinceLastUpdate: 3,
  ignoredLabels: [],
  ignoredLabels: ['A11y_ADS_OctTestPass', 'A11y_ADS_Schema_Dacpac_Backup', 'A11y_AzureDataStudio', 'A11yExclusion', 'A11yMAS', 'A11yResolved: Will Not Fix', 'A11yTCS'],
  perform: true
}
9  .github/pull_request_template.md  vendored  Normal file
@@ -0,0 +1,9 @@
<!-- Thank you for submitting a Pull Request. Please:
* Read our Pull Request guidelines:
https://github.com/Microsoft/azuredatastudio/wiki/How-to-Contribute#pull-requests.
* Associate an issue with the Pull Request.
* Ensure that the code is up-to-date with the `master` branch.
* Include a description of the proposed changes and how to test them.
-->

This PR fixes #
6  .github/stale.yml  vendored  Normal file
@@ -0,0 +1,6 @@
{
  perform: true,
  label: 'Stale PR',
  daysSinceLastUpdate: 7,
  ignoredLabels: ['Do Not Merge']
}
118  .github/workflows/ci.yml  vendored  Normal file
@@ -0,0 +1,118 @@
name: CI

on:
  push:
    branches:
      - master
      - release/*
  pull_request:
    branches:
      - master
      - release/*

jobs:
  # linux:
  #   runs-on: ubuntu-latest
  #   env:
  #     CHILD_CONCURRENCY: "1"
  #     GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
  #   steps:
  #   - uses: actions/checkout@v1
  #   # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
  #   - run: |
  #       sudo apt-get update
  #       sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
  #       sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
  #       sudo chmod +x /etc/init.d/xvfb
  #       sudo update-rc.d xvfb defaults
  #       sudo service xvfb start
  #     name: Setup Build Environment
  #   - uses: actions/setup-node@v1
  #     with:
  #       node-version: 10
  #   # TODO: cache node modules
  #   - run: yarn --frozen-lockfile
  #     name: Install Dependencies
  #   - run: yarn electron x64
  #     name: Download Electron
  #   - run: yarn gulp hygiene
  #     name: Run Hygiene Checks
  #   - run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
  #     name: Run Strict Compile Options
  #   # - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
  #   #   name: Run Monaco Editor Checks
  #   - run: yarn valid-layers-check
  #     name: Run Valid Layers Checks
  #   - run: yarn compile
  #     name: Compile Sources
  #   # - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
  #   #   name: Download Built-in Extensions
  #   - run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
  #     name: Run Unit Tests
  #   # - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
  #   #   name: Run Integration Tests

  windows:
    runs-on: windows-2016
    env:
      CHILD_CONCURRENCY: "1"
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@v1
      - uses: actions/setup-node@v1
        with:
          node-version: 10
      - uses: actions/setup-python@v1
        with:
          python-version: '2.x'
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron
        name: Download Electron
      - run: yarn gulp hygiene
        name: Run Hygiene Checks
      - run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
        name: Run Strict Compile Options
      # - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
      #   name: Run Monaco Editor Checks
      - run: yarn valid-layers-check
        name: Run Valid Layers Checks
      - run: yarn compile
        name: Compile Sources
      # - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
      #   name: Download Built-in Extensions
      - run: .\scripts\test.bat --tfs "Unit Tests"
        name: Run Unit Tests
      # - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
      #   name: Run Integration Tests

  darwin:
    runs-on: macos-latest
    env:
      CHILD_CONCURRENCY: "1"
      GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
    steps:
      - uses: actions/checkout@v1
      - uses: actions/setup-node@v1
        with:
          node-version: 10
      - run: yarn --frozen-lockfile
        name: Install Dependencies
      - run: yarn electron x64
        name: Download Electron
      - run: yarn gulp hygiene
        name: Run Hygiene Checks
      - run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
        name: Run Strict Compile Options
      # - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
      #   name: Run Monaco Editor Checks
      - run: yarn valid-layers-check
        name: Run Valid Layers Checks
      - run: yarn compile
        name: Compile Sources
      # - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
      #   name: Download Built-in Extensions
      - run: ./scripts/test.sh --tfs "Unit Tests"
        name: Run Unit Tests
      # - run: ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
      #   name: Run Integration Tests
1  .gitignore  vendored
@@ -30,3 +30,4 @@ coverage/
test_data/
test-results/
yarn-error.log
*.vsix
33  .lgtm/javascript-queries/promises.ql  Normal file
@@ -0,0 +1,33 @@
/**
 * @name No floating promises
 * @kind problem
 * @problem.severity error
 * @id js/experimental/floating-promise
 */
import javascript

private predicate isEscapingPromise(PromiseDefinition promise) {
  exists (DataFlow::Node escape | promise.flowsTo(escape) |
    escape = any(DataFlow::InvokeNode invk).getAnArgument()
    or
    escape = any(DataFlow::FunctionNode fun).getAReturn()
    or
    escape = any(ThrowStmt t).getExpr().flow()
    or
    escape = any(GlobalVariable v).getAnAssignedExpr().flow()
    or
    escape = any(DataFlow::PropWrite write).getRhs()
    or
    exists(WithStmt with, Assignment assign |
      with.mayAffect(assign.getLhs()) and
      assign.getRhs().flow() = escape
    )
  )
}

from PromiseDefinition promise
where
  not exists(promise.getAMethodCall(any(string m | m = "then" or m = "catch" or m = "finally"))) and
  not exists (AwaitExpr e | promise.flowsTo(e.getOperand().flow())) and
  not isEscapingPromise(promise)
select promise, "This promise appears to be a floating promise"
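For context, a minimal TypeScript sketch (not taken from this repository) of the pattern the query above is written to catch: a promise that is neither awaited, chained, returned, nor passed along, so any rejection is silently lost. The `saveSettings` helper is hypothetical.

```typescript
// Hypothetical async helper; any call that returns a Promise behaves the same way.
async function saveSettings(payload: object): Promise<void> {
	// ... persist the payload somewhere ...
}

export function onSettingsChanged(payload: object): void {
	// Floating promise: the returned Promise is dropped, so a rejection
	// disappears silently. This is the kind of call the query reports.
	saveSettings(payload);

	// Not flagged: the promise is consumed via .catch(); awaiting it,
	// returning it, or passing it to another call also counts as "escaping".
	saveSettings(payload).catch(err => console.error('saving settings failed', err));
}
```

The "escaping" cases enumerated in the predicate above mirror the second call: once the promise flows into an argument, a return value, a throw, a global, or a property write, it is assumed to be handled elsewhere.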
6  .prettierrc.json  Normal file
@@ -0,0 +1,6 @@
{
  "useTabs": true,
  "printWidth": 120,
  "semi": true,
  "singleQuote": true
}
1  .vscode/extensions.json  vendored
@@ -2,7 +2,6 @@
// See https://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"ms-vscode.vscode-typescript-tslint-plugin",
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"msjsdiag.debugger-for-chrome"
143  .vscode/launch.json  vendored
@@ -16,6 +16,7 @@
"request": "attach",
|
||||
"name": "Attach to Extension Host",
|
||||
"port": 5870,
|
||||
"timeout": 30000,
|
||||
"restart": true,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
@@ -35,7 +36,10 @@
|
||||
"port": 5876,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
]
|
||||
],
|
||||
"presentation": {
|
||||
"hidden": true,
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
@@ -53,7 +57,10 @@
|
||||
"port": 5875,
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
]
|
||||
],
|
||||
"presentation": {
|
||||
"hidden": true,
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
@@ -66,17 +73,16 @@
|
||||
"request": "launch",
|
||||
"name": "Launch azuredatastudio",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
|
||||
"timeout": 20000
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
|
||||
},
|
||||
"osx": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
||||
"timeout": 20000
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"linux": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
|
||||
"timeout": 20000
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh"
|
||||
},
|
||||
"port": 9222,
|
||||
"timeout": 20000,
|
||||
"env": {
|
||||
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
|
||||
},
|
||||
@@ -84,9 +90,12 @@
|
||||
"urlFilter": "*workbench.html*",
|
||||
"runtimeArgs": [
|
||||
"--inspect=5875",
|
||||
"--no-cached-data"
|
||||
"--no-cached-data",
|
||||
],
|
||||
"webRoot": "${workspaceFolder}"
|
||||
"webRoot": "${workspaceFolder}",
|
||||
// Settings for js-debug:
|
||||
"pauseForSourceMap": false,
|
||||
"outFiles": ["${workspaceFolder}/out/**/*.js"],
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
@@ -101,7 +110,11 @@
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
]
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_launch",
|
||||
"order": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
@@ -128,13 +141,43 @@
|
||||
"timeout": 45000
|
||||
},
|
||||
{
|
||||
"name": "Launch Built-in Extension",
|
||||
"type": "extensionHost",
|
||||
"type": "chrome",
|
||||
"request": "launch",
|
||||
"runtimeExecutable": "${execPath}",
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
|
||||
]
|
||||
"name": "Launch ADS (Web) (TBD)",
|
||||
"runtimeExecutable": "yarn",
|
||||
"runtimeArgs": [
|
||||
"web"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "2_launch",
|
||||
"order": 2
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
"request": "launch",
|
||||
"name": "Launch ADS (Web, Chrome) (TBD)",
|
||||
"url": "http://localhost:8080",
|
||||
"preLaunchTask": "Run web",
|
||||
"presentation": {
|
||||
"group": "2_launch",
|
||||
"order": 3
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Git Unit Tests",
|
||||
"program": "${workspaceFolder}/extensions/git/node_modules/mocha/bin/_mocha",
|
||||
"stopOnEntry": false,
|
||||
"cwd": "${workspaceFolder}/extensions/git",
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/extensions/git/out/**/*.js"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 10
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
@@ -144,6 +187,10 @@
|
||||
"cwd": "${workspaceFolder}/test/smoke",
|
||||
"env": {
|
||||
"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
|
||||
},
|
||||
"presentation": {
|
||||
"group": "5_tests",
|
||||
"order": 8
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -165,7 +212,13 @@
|
||||
"cwd": "${workspaceFolder}",
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/out/**/*.js"
|
||||
]
|
||||
],
|
||||
"env": {
|
||||
"MOCHA_COLORS": "true"
|
||||
},
|
||||
"presentation": {
|
||||
"hidden": true
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
@@ -183,6 +236,22 @@
|
||||
"webRoot": "${workspaceFolder}",
|
||||
"timeout": 45000
|
||||
},
|
||||
{
|
||||
"type": "chrome",
|
||||
"request": "launch",
|
||||
"name": "Run Extension Integration Tests",
|
||||
"windows": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.bat"
|
||||
},
|
||||
"osx": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
|
||||
},
|
||||
"linux": {
|
||||
"runtimeExecutable": "${workspaceFolder}/scripts/sql-test-integration.sh"
|
||||
},
|
||||
"webRoot": "${workspaceFolder}",
|
||||
"timeout": 45000
|
||||
},
|
||||
],
|
||||
"compounds": [
|
||||
{
|
||||
@@ -204,21 +273,45 @@
|
||||
"configurations": [
|
||||
"Launch azuredatastudio",
|
||||
"Attach to Main Process"
|
||||
]
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 1
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Search and Renderer processes",
|
||||
"name": "Debug azuredatastudio Main, Renderer & Extension Host",
|
||||
"configurations": [
|
||||
"Launch azuredatastudio",
|
||||
"Attach to Search Process"
|
||||
]
|
||||
"Attach to Main Process",
|
||||
"Attach to Extension Host"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 3
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Renderer and Extension Host processes",
|
||||
"name": "Debug Renderer and Extension Host processes",
|
||||
"configurations": [
|
||||
"Launch azuredatastudio",
|
||||
"Attach to Extension Host"
|
||||
]
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 2
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "Attach Renderer and Extension Host",
|
||||
"configurations": [
|
||||
"Attach to azuredatastudio",
|
||||
"Attach to Extension Host"
|
||||
],
|
||||
"presentation": {
|
||||
"group": "1_vscode",
|
||||
"order": 2
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
|
||||
10  .vscode/searches/ts36031.code-search  vendored  Normal file
@@ -0,0 +1,10 @@
# Query: \w+\?\..+![(.[]
# Flags: RegExp
# ContextLines: 2

src/vs/base/browser/ui/tree/asyncDataTree.ts:
270 } : undefined,
271 isChecked: options.ariaProvider!.isChecked ? (e) => {
272: return options.ariaProvider?.isChecked!(e.element as T);
273 } : undefined
274 },
9  .vscode/settings.json  vendored
@@ -37,8 +37,14 @@
}
}
],
"eslint.options": {
"rulePaths": [
"./build/lib/eslint"
]
},
"typescript.tsdk": "node_modules/typescript/lib",
"npm.exclude": "**/extensions/**",
"npm.packageManager": "yarn",
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.quoteStyle": "single",
@@ -60,5 +66,6 @@
"remote.extensionKind": {
"msjsdiag.debugger-for-chrome": "workspace"
},
"gulp.autoDetect": "off",
"files.insertFinalNewline": true
}
}
56  .vscode/tasks.json  vendored
@@ -5,7 +5,10 @@
"type": "npm",
|
||||
"script": "watch",
|
||||
"label": "Build VS Code",
|
||||
"group": "build",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
@@ -30,40 +33,32 @@
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "strict-initialization-watch",
|
||||
"label": "TS - Strict Initialization",
|
||||
"script": "strict-function-types-watch",
|
||||
"label": "TS - Strict Function Types",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
},
|
||||
"problemMatcher": {
|
||||
"base": "$tsc-watch",
|
||||
"owner": "typescript-strict-initialization",
|
||||
"owner": "typescript-function-types",
|
||||
"applyTo": "allDocuments"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "strict-null-check-watch",
|
||||
"label": "TS - Strict Null Cheks",
|
||||
"script": "strict-vscode-watch",
|
||||
"label": "TS - Strict VSCode",
|
||||
"isBackground": true,
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
},
|
||||
"problemMatcher": {
|
||||
"base": "$tsc-watch",
|
||||
"owner": "typescript-strict-null-checks",
|
||||
"owner": "typescript-vscode",
|
||||
"applyTo": "allDocuments"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "gulp",
|
||||
"task": "tslint",
|
||||
"label": "Run tslint",
|
||||
"problemMatcher": [
|
||||
"$tslint5"
|
||||
]
|
||||
},
|
||||
{
|
||||
"label": "Run tests",
|
||||
"type": "shell",
|
||||
@@ -87,8 +82,8 @@
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"type": "gulp",
|
||||
"task": "electron",
|
||||
"type": "npm",
|
||||
"script": "electron",
|
||||
"label": "Download electron"
|
||||
},
|
||||
{
|
||||
@@ -96,6 +91,31 @@
|
||||
"task": "hygiene",
|
||||
"problemMatcher": []
|
||||
},
|
||||
|
||||
{
|
||||
"type": "shell",
|
||||
"command": "yarn web -- --no-launch",
|
||||
"label": "Run web",
|
||||
"isBackground": true,
|
||||
"problemMatcher": {
|
||||
"pattern": {
|
||||
"regexp": ""
|
||||
},
|
||||
"background": {
|
||||
"beginsPattern": ".*node .*",
|
||||
"endsPattern": "Web UI available at .*"
|
||||
}
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "never"
|
||||
}
|
||||
},
|
||||
{
|
||||
"type": "npm",
|
||||
"script": "eslint",
|
||||
"problemMatcher": {
|
||||
"source": "eslint",
|
||||
"base": "$eslint-stylish"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
2  .yarnrc
@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "4.2.9"
target "6.1.6"
runtime "electron"
62  CHANGELOG.md
@@ -1,5 +1,65 @@
# Change Log

## Version 1.14.1
* Release date: December 26, 2019
* Release status: General Availability
* Hotfix for bug https://github.com/microsoft/azuredatastudio/issues/8768

## Version 1.14.0
* Release date: December 19, 2019
* Release status: General Availability
* Added bigdatacluster.ignoreSslVerification setting to allow ignoring SSL verification errors when connecting to a BDC [#8129](https://github.com/microsoft/azuredatastudio/issues/8129)
* Changed attach to connection dropdown in Notebooks to only list the currently active connection [#8582](https://github.com/microsoft/azuredatastudio/pull/8582)
* Allow changing default language flavor for offline query editors [#8419](https://github.com/microsoft/azuredatastudio/pull/8419)
* GA status for Big Data Cluster/SQL 2019 features [#8269](https://github.com/microsoft/azuredatastudio/issues/8269)
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/44?closed=1).

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

## Version 1.13.1
* Release date: November 15, 2019
* Release status: General Availability
* Resolved [#8210 Copy/Paste results are out of order](https://github.com/microsoft/azuredatastudio/issues/8210).

## Version 1.13.0
* Release date: November 4, 2019
* Release status: General Availability
* General Availability release for Schema Compare and DACPAC extensions
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/43?closed=1).

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
* aspnerd for `Use selected DB for import wizard schema list` [#7878](https://github.com/microsoft/azuredatastudio/pull/7878)

## Version 1.12.2
* Release date: October 11, 2019
* Release status: General Availability
* Hotfix release (1.12.2): `Disable automatically starting the EH in inspect mode` https://github.com/microsoft/azuredatastudio/commit/c9bef82ace6c67190d0e83820011a2bbd1f793c1

## Version 1.12.1
* Release date: October 7, 2019
* Release status: General Availability
* Hotfix release: `Notebooks: Ensure quotes and backslashes are escaped properly in text editor model` https://github.com/microsoft/azuredatastudio/pull/7540

## Version 1.12.0
* Release date: October 2, 2019
* Release status: General Availability

## What's new in this version
* Announcing the Query History panel
* Improved Query Results Grid copy selection support
* TempDB page added to Server Reports extension
* PowerShell extension update
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/42?closed=1).

## Version 1.11.0
* Release date: September 10, 2019
* Release status: General Availability

## What's new in this version
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/41?closed=1).

## Version 1.10.0
* Release date: August 14, 2019
* Release status: General Availability
@@ -197,7 +257,7 @@ We would like to thank all our users who raised issues, and in particular the fo

## What's new in this version
* Announcing the SQL Server 2019 Preview extension.
* Support for SQL Server 2019 preview features including big data cluster support.
* Support for SQL Server 2019 preview features including Big Data Cluster support.
* Azure Data Studio Notebooks
* The Azure Resource Explorer viewlets you browse data-related endpoints for your Azure accounts and create connections to them in Object Explorer. In this release Azure SQL Databases and servers are supported.
* SQL Server Polybase Create External Table Wizard
21  README.md
@@ -2,6 +2,7 @@

[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
[](https://dev.azure.com/azuredatastudio/azuredatastudio/_build/latest?definitionId=4&branchName=master)
[](https://twitter.com/azuredatastudio)

Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.

@@ -9,13 +10,13 @@ Azure Data Studio is a data management tool that enables you to work with SQL Se

Platform | Link
-- | --
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2100710
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2100711
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2100712
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2100809
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2100714
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2100810
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2100672
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2113530
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2113628
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2113529
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2113528
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2113627
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2113718
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2113344

Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.

@@ -68,6 +69,12 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* eulercamposbarros for `Prevent connections from moving on click (#7528)`
* AlexFsmn for `Fixed issue where task icons got hidden if text was too long`
* jamesrod817 for `Tempdb (#7022)`
* dzsquared for `fix(snippets): ads parenthesis to sqlcreateindex snippet #7020`
* devmattrick for `Update row count as updates are received #6642`
* mottykohn for `In Message panel onclick scroll to line #6417`
* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
* yamatoya for `fix the format #4899`
* GeoffYoung for `Fix sqlDropColumn description #4422`
@@ -36,6 +36,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jquery-ui: https://github.com/jquery/jquery-ui
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
make-error: https://github.com/JsCommunity/make-error
minimist: https://github.com/substack/minimist
@@ -1175,7 +1176,35 @@ That's all there is to it!
=========================================
END OF jschardet NOTICES AND INFORMATION

%% jupyter-powershell NOTICES AND INFORMATION BEGIN HERE
=========================================
The MIT License (MIT)

Copyright (c) 2016 Sergei Vorobev

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

=========================================
END OF jupyter-powershell NOTICES AND INFORMATION

%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2015 Project Jupyter Contributors
All rights reserved.
@@ -1,84 +0,0 @@
steps:
- script: |
    export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
    sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
    sudo chmod +x /etc/init.d/xvfb
    sudo update-rc.d xvfb defaults
    sudo service xvfb start
    sudo apt-get install -y libkrb5-dev
    # sh -e /etc/init.d/xvfb start
    # sleep 3
  displayName: "Linux preinstall"
  condition: eq(variables['Agent.OS'], 'Linux')

- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.10.1"

- script: |
    yarn --frozen-lockfile
  displayName: Install Dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    yarn gulp electron-x64
  displayName: Download Electron
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- script: |
    yarn gulp hygiene
  displayName: Run Hygiene Checks

- script: |
    yarn tslint
  displayName: "Run TSLint"

- script: |
    yarn strict-null-check
  displayName: "Run Strict Null Check"

- script: |
    yarn compile
  displayName: "Compile"

- script: |
    DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
  displayName: "Tests"
  condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))

- script: |
    DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
  displayName: "Tests"
  condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))

- task: PublishTestResults@2
  inputs:
    testResultsFiles: "**/test-results.xml"
  condition: succeededOrFailed()

- task: PublishCodeCoverageResults@1
  inputs:
    codeCoverageTool: "cobertura"
    summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
    reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
  condition: ne(variables['Agent.OS'], 'Linux')
@@ -1,65 +0,0 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.10.1"

- script: |
    yarn --frozen-lockfile
  displayName: Install Dependencies
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
  inputs:
    keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
    targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
    vstsFeed: "$(build-cache)"
  condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
    yarn gulp electron-x64
  displayName: "Electron"
  env:
    GITHUB_TOKEN: $(GITHUB_TOKEN)

- script: |
    yarn gulp hygiene
  displayName: Run Hygiene Checks

- script: |
    yarn tslint
  displayName: "Run TSLint"

- script: |
    yarn strict-null-check
  displayName: "Run Strict Null Check"

- script: |
    yarn compile
  displayName: "Compile"

- script: |
    .\scripts\test.bat --reporter mocha-junit-reporter --coverage
  displayName: "Test"

- task: PublishTestResults@2
  inputs:
    testResultsFiles: "test-results.xml"
  condition: succeededOrFailed()

- task: PublishCodeCoverageResults@1
  inputs:
    codeCoverageTool: "cobertura"
    summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
    reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report
@@ -3,20 +3,20 @@ trigger:
- release/*

jobs:
- job: Windows
  pool:
    vmImage: VS2017-Win2016
  steps:
  - template: azure-pipelines-windows.yml
- job: Windows
  pool:
    vmImage: VS2017-Win2016
  steps:
  - template: build/azure-pipelines/win32/continuous-build-win32.yml

- job: Linux
  pool:
    vmImage: "Ubuntu-16.04"
  steps:
  - template: azure-pipelines-linux-mac.yml
- job: Linux
  pool:
    vmImage: 'Ubuntu-16.04'
  steps:
  - template: build/azure-pipelines/linux/continuous-build-linux.yml

- job: macOS
  pool:
    vmImage: macOS 10.13
  steps:
  - template: azure-pipelines-linux-mac.yml
- job: macOS
  pool:
    vmImage: macOS-latest
  steps:
  - template: build/azure-pipelines/darwin/continuous-build-darwin.yml

@@ -1 +1 @@
2019-07-11T05:47:05.444Z
2019-12-01T02:20:58.491Z

build/azure-pipelines/common/copyArtifacts.ts (new file, 36 lines)
@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

import * as vfs from 'vinyl-fs';

const files = [
    '.build/extensions/**/*.vsix', // external extensions
    '.build/win32-x64/**/*.{exe,zip}', // windows binaries
    '.build/linux/sha256hashes.txt', // linux hashes
    '.build/linux/deb/amd64/deb/*', // linux debs
    '.build/linux/rpm/x86_64/*', // linux rpms
    '.build/linux/server/*', // linux server
    '.build/linux/archive/*', // linux archive
    '.build/docker/**', // docker images
    '.build/darwin/**', // darwin binaries
    '.build/version.json' // version information
];

async function main() {
    return new Promise((resolve, reject) => {
        const stream = vfs.src(files, { base: '.build', allowEmpty: true })
            .pipe(vfs.dest(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY!));

        stream.on('end', () => resolve());
        stream.on('error', e => reject(e));
    });
}

main().catch(err => {
    console.error(err);
    process.exit(1);
});
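The copyArtifacts script above simply streams whatever matches the `files` globs under `.build` into the pipeline's artifact staging directory; the platform-specific createDrop.sh scripts later in this change invoke it as their final step. As a rough sketch (not part of the diff), the compiled script can be exercised like this, assuming the `.build` outputs already exist and noting that `BUILD_ARTIFACTSTAGINGDIRECTORY` is normally populated by Azure Pipelines rather than set by hand:

```sh
# Sketch only: BUILD_ARTIFACTSTAGINGDIRECTORY is a pipeline-provided variable;
# the local path below is a hypothetical stand-in for manual testing.
export BUILD_ARTIFACTSTAGINGDIRECTORY="$HOME/ads-artifacts"
mkdir -p "$BUILD_ARTIFACTSTAGINGDIRECTORY"
node build/azure-pipelines/common/copyArtifacts.js
```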
build/azure-pipelines/common/createAsset.ts (new file, 132 lines)
@@ -0,0 +1,132 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import { Readable } from 'stream';
|
||||
import * as crypto from 'crypto';
|
||||
import * as azure from 'azure-storage';
|
||||
import * as mime from 'mime';
|
||||
import { CosmosClient } from '@azure/cosmos';
|
||||
|
||||
interface Asset {
|
||||
platform: string;
|
||||
type: string;
|
||||
url: string;
|
||||
mooncakeUrl?: string;
|
||||
hash: string;
|
||||
sha256hash: string;
|
||||
size: number;
|
||||
supportsFastUpdate?: boolean;
|
||||
}
|
||||
|
||||
if (process.argv.length !== 6) {
|
||||
console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
function hashStream(hashName: string, stream: Readable): Promise<string> {
|
||||
return new Promise<string>((c, e) => {
|
||||
const shasum = crypto.createHash(hashName);
|
||||
|
||||
stream
|
||||
.on('data', shasum.update.bind(shasum))
|
||||
.on('error', e)
|
||||
.on('close', () => c(shasum.digest('hex')));
|
||||
});
|
||||
}
|
||||
|
||||
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
|
||||
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
|
||||
return existsResult.exists;
|
||||
}
|
||||
|
||||
async function uploadBlob(blobService: azure.BlobService, quality: string, blobName: string, file: string): Promise<void> {
|
||||
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(file),
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
};
|
||||
|
||||
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
function getEnv(name: string): string {
|
||||
const result = process.env[name];
|
||||
|
||||
if (typeof result === 'undefined') {
|
||||
throw new Error('Missing env: ' + name);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const [, , platform, type, name, file] = process.argv;
|
||||
const quality = getEnv('VSCODE_QUALITY');
|
||||
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||
|
||||
console.log('Creating asset...');
|
||||
|
||||
const stat = await new Promise<fs.Stats>((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
|
||||
const size = stat.size;
|
||||
|
||||
console.log('Size:', size);
|
||||
|
||||
const stream = fs.createReadStream(file);
|
||||
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
|
||||
|
||||
console.log('SHA1:', sha1hash);
|
||||
console.log('SHA256:', sha256hash);
|
||||
|
||||
const blobName = commit + '/' + name;
|
||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
|
||||
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
const blobExists = await doesAssetExist(blobService, quality, blobName);
|
||||
|
||||
if (blobExists) {
|
||||
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Uploading blobs to Azure storage...');
|
||||
|
||||
await uploadBlob(blobService, quality, blobName, file);
|
||||
|
||||
console.log('Blobs successfully uploaded.');
|
||||
|
||||
const asset: Asset = {
|
||||
platform,
|
||||
type,
|
||||
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
|
||||
hash: sha1hash,
|
||||
sha256hash,
|
||||
size
|
||||
};
|
||||
|
||||
// Remove this if we ever need to rollback fast updates for windows
|
||||
if (/win32/.test(platform)) {
|
||||
asset.supportsFastUpdate = true;
|
||||
}
|
||||
|
||||
console.log('Asset:', JSON.stringify(asset, null, ' '));
|
||||
|
||||
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const scripts = client.database('builds').container(quality).scripts;
|
||||
await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
|
||||
}
|
||||
|
||||
main().then(() => {
|
||||
console.log('Asset successfully created');
|
||||
process.exit(0);
|
||||
}, err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
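createAsset.ts checks for exactly four positional arguments (platform, type, name, file) and reads its credentials from the environment, so an invocation takes the shape sketched below. Every value shown is a hypothetical placeholder; only the environment variable names come from the script itself:

```sh
# Sketch only: all values are placeholders, not real credentials or endpoints.
export VSCODE_QUALITY="<quality>"                    # read via getEnv()
export BUILD_SOURCEVERSION="<commit-sha>"            # used as the blob name prefix
export AZURE_STORAGE_ACCOUNT_2="<storage-account>"   # blob upload target
export AZURE_STORAGE_ACCESS_KEY_2="<storage-key>"
export AZURE_DOCUMENTDB_ENDPOINT="<cosmos-endpoint>" # Cosmos DB where the asset is recorded
export AZURE_DOCUMENTDB_MASTERKEY="<cosmos-key>"
export AZURE_CDN_URL="<cdn-base-url>"                # prefix for the published asset URL

node build/azure-pipelines/common/createAsset.js \
  darwin archive "azuredatastudio-darwin.zip" ../azuredatastudio-darwin.zip
```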
build/azure-pipelines/common/createBuild.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import { CosmosClient } from '@azure/cosmos';
|
||||
|
||||
if (process.argv.length !== 3) {
|
||||
console.error('Usage: node createBuild.js VERSION');
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
function getEnv(name: string): string {
|
||||
const result = process.env[name];
|
||||
|
||||
if (typeof result === 'undefined') {
|
||||
throw new Error('Missing env: ' + name);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const [, , _version] = process.argv;
|
||||
const quality = getEnv('VSCODE_QUALITY');
|
||||
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||
const queuedBy = getEnv('BUILD_QUEUEDBY');
|
||||
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
|
||||
const version = _version + (quality === 'stable' ? '' : `-${quality}`);
|
||||
|
||||
console.log('Creating build...');
|
||||
console.log('Quality:', quality);
|
||||
console.log('Version:', version);
|
||||
console.log('Commit:', commit);
|
||||
|
||||
const build = {
|
||||
id: commit,
|
||||
timestamp: (new Date()).getTime(),
|
||||
version,
|
||||
isReleased: false,
|
||||
sourceBranch,
|
||||
queuedBy,
|
||||
assets: [],
|
||||
updates: {}
|
||||
};
|
||||
|
||||
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const scripts = client.database('builds').container(quality).scripts;
|
||||
await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
|
||||
}
|
||||
|
||||
main().then(() => {
|
||||
console.log('Build successfully created');
|
||||
process.exit(0);
|
||||
}, err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
build/azure-pipelines/common/publish-webview.sh (new executable file, 9 lines)
@@ -0,0 +1,9 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
REPO="$(pwd)"
|
||||
|
||||
# Publish webview contents
|
||||
PACKAGEJSON="$REPO/package.json"
|
||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||
|
||||
node build/azure-pipelines/common/publish-webview.js "$REPO/src/vs/workbench/contrib/webview/browser/pre/"
|
||||
build/azure-pipelines/common/publish-webview.ts (new file, 87 lines)
@@ -0,0 +1,87 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as azure from 'azure-storage';
|
||||
import * as mime from 'mime';
|
||||
import * as minimist from 'minimist';
|
||||
import { basename, join } from 'path';
|
||||
|
||||
const fileNames = [
|
||||
'fake.html',
|
||||
'host.js',
|
||||
'index.html',
|
||||
'main.js',
|
||||
'service-worker.js'
|
||||
];
|
||||
|
||||
async function assertContainer(blobService: azure.BlobService, container: string): Promise<void> {
|
||||
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
async function doesBlobExist(blobService: azure.BlobService, container: string, blobName: string): Promise<boolean | undefined> {
|
||||
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
|
||||
return existsResult.exists;
|
||||
}
|
||||
|
||||
async function uploadBlob(blobService: azure.BlobService, container: string, blobName: string, file: string): Promise<void> {
|
||||
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(file),
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
};
|
||||
|
||||
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
async function publish(commit: string, files: readonly string[]): Promise<void> {
|
||||
|
||||
console.log('Publishing...');
|
||||
console.log('Commit:', commit);
|
||||
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT']!;
|
||||
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY']!)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
await assertContainer(blobService, commit);
|
||||
|
||||
for (const file of files) {
|
||||
const blobName = basename(file);
|
||||
const blobExists = await doesBlobExist(blobService, commit, blobName);
|
||||
if (blobExists) {
|
||||
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
|
||||
continue;
|
||||
}
|
||||
console.log('Uploading blob to Azure storage...');
|
||||
await uploadBlob(blobService, commit, blobName, file);
|
||||
}
|
||||
|
||||
console.log('Blobs successfully uploaded.');
|
||||
}
|
||||
|
||||
function main(): void {
|
||||
const commit = process.env['BUILD_SOURCEVERSION'];
|
||||
|
||||
if (!commit) {
|
||||
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
|
||||
return;
|
||||
}
|
||||
|
||||
const opts = minimist(process.argv.slice(2));
|
||||
const [directory] = opts._;
|
||||
|
||||
const files = fileNames.map(fileName => join(directory, fileName));
|
||||
|
||||
publish(commit, files).catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
if (process.argv.length < 3) {
|
||||
console.error('Usage: node publish.js <directory>');
|
||||
process.exit(-1);
|
||||
}
|
||||
main();
|
||||
build/azure-pipelines/common/releaseBuild.ts (new file, 70 lines)
@@ -0,0 +1,70 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import { CosmosClient } from '@azure/cosmos';
|
||||
|
||||
function getEnv(name: string): string {
|
||||
const result = process.env[name];
|
||||
|
||||
if (typeof result === 'undefined') {
|
||||
throw new Error('Missing env: ' + name);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
interface Config {
|
||||
id: string;
|
||||
frozen: boolean;
|
||||
}
|
||||
|
||||
function createDefaultConfig(quality: string): Config {
|
||||
return {
|
||||
id: quality,
|
||||
frozen: false
|
||||
};
|
||||
}
|
||||
|
||||
async function getConfig(client: CosmosClient, quality: string): Promise<Config> {
|
||||
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;
|
||||
|
||||
const res = await client.database('builds').container('config').items.query(query).fetchAll();
|
||||
|
||||
if (res.resources.length === 0) {
|
||||
return createDefaultConfig(quality);
|
||||
}
|
||||
|
||||
return res.resources[0] as Config;
|
||||
}
|
||||
|
||||
async function main(): Promise<void> {
|
||||
const commit = getEnv('BUILD_SOURCEVERSION');
|
||||
const quality = getEnv('VSCODE_QUALITY');
|
||||
|
||||
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const config = await getConfig(client, quality);
|
||||
|
||||
console.log('Quality config:', config);
|
||||
|
||||
if (config.frozen) {
|
||||
console.log(`Skipping release because quality ${quality} is frozen.`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Releasing build ${commit}...`);
|
||||
|
||||
const scripts = client.database('builds').container(quality).scripts;
|
||||
await scripts.storedProcedure('releaseBuild').execute('', [commit]);
|
||||
}
|
||||
|
||||
main().then(() => {
|
||||
console.log('Build successfully released');
|
||||
process.exit(0);
|
||||
}, err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -8,7 +8,7 @@
|
||||
import * as url from 'url';
|
||||
import * as azure from 'azure-storage';
|
||||
import * as mime from 'mime';
|
||||
import { DocumentClient, RetrievedDocument } from 'documentdb';
|
||||
import { CosmosClient } from '@azure/cosmos';
|
||||
|
||||
function log(...args: any[]) {
|
||||
console.log(...[`[${new Date().toISOString()}]`, ...args]);
|
||||
@@ -23,7 +23,7 @@ if (process.argv.length < 3) {
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
interface Build extends RetrievedDocument {
|
||||
interface Build {
|
||||
assets: Asset[];
|
||||
}
|
||||
|
||||
@@ -38,62 +38,20 @@ interface Asset {
|
||||
supportsFastUpdate?: boolean;
|
||||
}
|
||||
|
||||
function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
|
||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = 'dbs/builds/colls/' + quality;
|
||||
const updateQuery = {
|
||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
||||
parameters: [{ name: '@id', value: commit }]
|
||||
};
|
||||
|
||||
let updateTries = 0;
|
||||
|
||||
function _update(): Promise<void> {
|
||||
updateTries++;
|
||||
|
||||
return new Promise<void>((c, e) => {
|
||||
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
|
||||
if (err) { return e(err); }
|
||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
||||
|
||||
const release = results[0];
|
||||
|
||||
release.assets = [
|
||||
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
|
||||
asset
|
||||
];
|
||||
|
||||
client.replaceDocument(release._self, release, err => {
|
||||
if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
|
||||
if (err) { return e(err); }
|
||||
|
||||
log('Build successfully updated.');
|
||||
c();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return _update();
|
||||
}
|
||||
|
||||
async function sync(commit: string, quality: string): Promise<void> {
|
||||
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
|
||||
|
||||
const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = `dbs/builds/colls/${quality}`;
|
||||
const query = {
|
||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
||||
parameters: [{ name: '@id', value: commit }]
|
||||
};
|
||||
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const container = client.database('builds').container(quality);
|
||||
|
||||
const build = await new Promise<Build>((c, e) => {
|
||||
cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
|
||||
if (err) { return e(err); }
|
||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
||||
c(results[0] as Build);
|
||||
});
|
||||
});
|
||||
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
|
||||
const res = await container.items.query<Build>(query, {}).fetchAll();
|
||||
|
||||
if (res.resources.length !== 1) {
|
||||
throw new Error(`No builds found for ${commit}`);
|
||||
}
|
||||
|
||||
const build = res.resources[0];
|
||||
|
||||
log(`Found build for ${commit}, with ${build.assets.length} assets`);
|
||||
|
||||
@@ -140,8 +98,9 @@ async function sync(commit: string, quality: string): Promise<void> {
|
||||
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
||||
|
||||
log(` Updating build in DB...`);
|
||||
asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
||||
await updateBuild(commit, quality, asset.platform, asset.type, asset);
|
||||
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
||||
await container.scripts.storedProcedure('setAssetMooncakeUrl')
|
||||
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]);
|
||||
|
||||
log(` Done ✔️`);
|
||||
} catch (err) {
|
||||
|
||||
@@ -1,46 +1,52 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
versionSpec: "12.13.0"
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
- script: |
|
||||
yarn --frozen-lockfile
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install Dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
yarn electron x64
|
||||
displayName: Download Electron
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
- script: | # {{SQL CARBON EDIT}} add step
|
||||
yarn strict-vscode
|
||||
displayName: Run Strict Null Check.
|
||||
# - script: | {{SQL CARBON EDIT}} remove step
|
||||
# yarn monaco-compile-check
|
||||
# displayName: Run Monaco Editor Checks
|
||||
- script: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
yarn valid-layers-check
|
||||
displayName: Run Valid Layers Checks
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
- script: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
# - script: | {{SQL CARBON EDIT}} remove step
|
||||
# yarn download-builtin-extensions
|
||||
# displayName: Download Built-in Extensions
|
||||
- script: |
|
||||
./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests
|
||||
- script: |
|
||||
./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
displayName: Run Integration Tests
|
||||
# - script: | {{SQL CARBON EDIT}} remove step
|
||||
# ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
# displayName: Run Integration Tests
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
|
||||
build/azure-pipelines/darwin/createDrop.sh (new executable file, 11 lines)
@@ -0,0 +1,11 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
REPO="$(pwd)"
|
||||
|
||||
# ensure drop directories exist
|
||||
mkdir -p $REPO/.build/darwin/{archive,server}
|
||||
|
||||
# package Remote Extension Host
|
||||
pushd .. && mv azuredatastudio-reh-darwin azuredatastudio-server-darwin && zip -Xry $REPO/.build/darwin/server/azuredatastudio-server-darwin.zip azuredatastudio-server-darwin && popd
|
||||
|
||||
node build/azure-pipelines/common/copyArtifacts.js
|
||||
build/azure-pipelines/darwin/entitlements.xml (new file, 22 lines)
@@ -0,0 +1,22 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
|
||||
<plist version="1.0">
|
||||
<dict>
|
||||
<key>com.apple.security.cs.allow-jit</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-executable-page-protection</key>
|
||||
<true/>
|
||||
<key>com.apple.security.cs.disable-library-validation</key>
|
||||
<true/>
|
||||
<key>com.apple.security.network.client</key>
|
||||
<true/>
|
||||
<key>com.apple.security.network.server</key>
|
||||
<true/>
|
||||
<key>com.apple.security.app-sandbox</key>
|
||||
<false/>
|
||||
<key>com.apple.security.automation.apple-events</key>
|
||||
<true/>
|
||||
</dict>
|
||||
</plist>
|
||||
@@ -21,11 +21,11 @@ steps:
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
versionSpec: "12.13.0"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
versionSpec: "1.x"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
@@ -102,20 +102,28 @@ steps:
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
# Figure out the full absolute path of the product we just built
|
||||
# including the remote server and configure the integration tests
|
||||
# to run with these builds instead of running out of sources.
|
||||
set -e
|
||||
APP_ROOT=$(agent.builddirectory)/VSCode-darwin
|
||||
APP_NAME="`ls $APP_ROOT | head -n 1`"
|
||||
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME/Contents/MacOS/Electron" \
|
||||
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
|
||||
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||
displayName: Run integration tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
cd test/smoke
|
||||
yarn compile
|
||||
cd -
|
||||
yarn smoketest --web --headless
|
||||
continueOnError: true
|
||||
displayName: Run web smoke tests
|
||||
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
# Web Smoke Tests disabled due to https://github.com/microsoft/vscode/issues/80308
|
||||
# - script: |
|
||||
# set -e
|
||||
# cd test/smoke
|
||||
# yarn compile
|
||||
# cd -
|
||||
# yarn smoketest --web --headless
|
||||
# continueOnError: true
|
||||
# displayName: Run web smoke tests
|
||||
# condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
@@ -5,28 +5,20 @@ set -e
|
||||
zip -d ../VSCode-darwin.zip "*.pkg"
|
||||
|
||||
# publish the build
|
||||
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
|
||||
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
|
||||
node build/azure-pipelines/common/publish.js \
|
||||
"$VSCODE_QUALITY" \
|
||||
node build/azure-pipelines/common/createAsset.js \
|
||||
darwin \
|
||||
archive \
|
||||
"VSCode-darwin-$VSCODE_QUALITY.zip" \
|
||||
$VERSION \
|
||||
true \
|
||||
../VSCode-darwin.zip
|
||||
|
||||
# package Remote Extension Host
|
||||
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
|
||||
|
||||
# publish Remote Extension Host
|
||||
node build/azure-pipelines/common/publish.js \
|
||||
"$VSCODE_QUALITY" \
|
||||
node build/azure-pipelines/common/createAsset.js \
|
||||
server-darwin \
|
||||
archive-unsigned \
|
||||
"vscode-server-darwin.zip" \
|
||||
$VERSION \
|
||||
true \
|
||||
../vscode-server-darwin.zip
|
||||
|
||||
# publish hockeyapp symbols
|
||||
|
||||
build/azure-pipelines/darwin/sql-product-build-darwin.yml (new file, 256 lines)
@@ -0,0 +1,256 @@
|
||||
steps:
|
||||
- task: InstallAppleCertificate@2
|
||||
displayName: 'Install developer certificate'
|
||||
inputs:
|
||||
certSecureFile: 'osx_signing_key.p12'
|
||||
|
||||
- script: |
|
||||
mkdir -p .build
|
||||
echo -n $BUILD_SOURCEVERSION > .build/commit
|
||||
echo -n $VSCODE_QUALITY > .build/quality
|
||||
displayName: Prepare cache flag
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
|
||||
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
|
||||
vstsFeed: 'BuildCache'
|
||||
platformIndependent: true
|
||||
alias: 'Compilation'
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
exit 1
|
||||
displayName: Check RestoreCache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
|
||||
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: '10.15.3'
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||
inputs:
|
||||
versionSpec: '1.x'
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
inputs:
|
||||
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
|
||||
KeyVaultName: ado-secrets
|
||||
SecretsFilter: 'github-distro-mixin-password'
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login azuredatastudio
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
|
||||
git config user.email "andresse@microsoft.com"
|
||||
git config user.name "AzureDataStudio"
|
||||
displayName: Prepare tooling
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
displayName: Merge distro
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'BuildCache'
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install dependencies
|
||||
env:
|
||||
GITHUB_TOKEN: $(github-distro-mixin-password)
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'BuildCache'
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn postinstall
|
||||
displayName: Run postinstall scripts
|
||||
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
node build/azure-pipelines/mixin
|
||||
displayName: Mix in quality
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp install-sqltoolsservice
|
||||
displayName: Install sqltoolsservice
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp package-rebuild-extensions
|
||||
yarn gulp vscode-darwin-min-ci
|
||||
yarn gulp vscode-reh-darwin-min-ci
|
||||
yarn gulp vscode-reh-web-darwin-min-ci
|
||||
displayName: Build
|
||||
env:
|
||||
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test.sh --build --coverage --reporter mocha-junit-reporter
|
||||
displayName: Run unit tests
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
pushd ../azuredatastudio-darwin
|
||||
ls
|
||||
|
||||
echo "Cleaning the application"
|
||||
xattr -cr *.app
|
||||
cd *.app
|
||||
find . -name '._*' -print0 | xargs -0 rm -rf --
|
||||
cd ..
|
||||
|
||||
echo "Signing the application with deep"
|
||||
codesign --deep --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS *.app
|
||||
|
||||
cd *.app
|
||||
ls
|
||||
echo "Signing specific components"
|
||||
find . -type f -print0 | xargs -0 file | grep ': *Mach-O' | sed 's/: *Mach-O.*//' | while read -r file; do codesign --options runtime --timestamp --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS --force "$file" || break; done
|
||||
|
||||
echo "Signing Electron again..."
|
||||
codesign --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS Contents/Frameworks/Electron\ Framework.framework
|
||||
cd ..
|
||||
|
||||
echo "Signing the entire application one more time"
|
||||
codesign --force --timestamp --options runtime --entitlements $(Build.SourcesDirectory)/build/azure-pipelines/darwin/entitlements.xml -s LPV3BJJYXS *.app
|
||||
popd
|
||||
displayName: 'Manual codesign'
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
mkdir -p .build/darwin/archive
|
||||
pushd ../azuredatastudio-darwin
|
||||
ditto -c -k --keepParent *.app $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
|
||||
popd
|
||||
displayName: 'Archive'
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
displayName: 'Publish SelfSigned'
|
||||
inputs:
|
||||
artifactName: darwin-selfsigned
|
||||
targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
displayName: 'ESRP CodeSigning'
|
||||
inputs:
|
||||
ConnectedServiceName: 'Code Signing'
|
||||
FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
|
||||
Pattern: 'azuredatastudio-darwin.zip'
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"keyCode": "CP-401337-Apple",
|
||||
"operationCode": "MacAppDeveloperSign",
|
||||
"parameters": {
|
||||
"Hardening": "Enable"
|
||||
},
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 90
|
||||
condition: and(succeeded(), eq(variables['signtba'], true))
|
||||
|
||||
- script: |
|
||||
zip -d $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip "*.pkg"
|
||||
displayName: Clean Archive
|
||||
condition: and(succeeded(), eq(variables['signtba'], true))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
displayName: 'Publish Signed'
|
||||
inputs:
|
||||
artifactName: darwin-signed
|
||||
targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
|
||||
condition: and(succeeded(), eq(variables['signtba'], true))
|
||||
|
||||
- task: EsrpCodeSigning@1
|
||||
displayName: 'ESRP Notarization'
|
||||
inputs:
|
||||
ConnectedServiceName: 'Code Signing'
|
||||
FolderPath: '$(Build.SourcesDirectory)/.build/darwin/archive'
|
||||
Pattern: 'azuredatastudio-darwin.zip'
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"KeyCode": "CP-401337-Apple",
|
||||
"OperationCode": "MacAppNotarize",
|
||||
"Parameters": {
|
||||
"BundleId": "com.microsoft.azuredatastudio-$(VSCODE_QUALITY)"
|
||||
},
|
||||
"ToolName": "sign",
|
||||
"ToolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 120
|
||||
condition: and(succeeded(), eq(variables['signtba'], true))
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
displayName: 'Publish Notarized'
|
||||
inputs:
|
||||
artifactName: darwin-notarized
|
||||
targetPath: $(Build.SourcesDirectory)/.build/darwin/archive/azuredatastudio-darwin.zip
|
||||
condition: and(succeeded(), eq(variables['signtba'], true))
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./build/azure-pipelines/darwin/createDrop.sh
|
||||
displayName: Create Drop
|
||||
|
||||
- task: PublishBuildArtifacts@1
|
||||
displayName: 'Publish Artifact: drop'
|
||||
|
||||
- task: PublishTestResults@2
|
||||
displayName: 'Publish Test Results test-results.xml'
|
||||
inputs:
|
||||
testResultsFiles: 'test-results.xml'
|
||||
searchFolder: '$(Build.SourcesDirectory)'
|
||||
continueOnError: true
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- task: PublishTestResults@2
|
||||
displayName: 'Publish Integration and Smoke Test Results'
|
||||
inputs:
|
||||
testResultsFiles: 'dawin-integration-tests-results.xml'
|
||||
searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
|
||||
continueOnError: true
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- task: PublishCodeCoverageResults@1
|
||||
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
|
||||
inputs:
|
||||
codeCoverageTool: Cobertura
|
||||
summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
|
||||
reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
|
||||
continueOnError: true
|
||||
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: 'Component Detection'
|
||||
inputs:
|
||||
failOnAlert: true
|
||||
build/azure-pipelines/darwin/sql-publish.ps1 (new file, 19 lines)
@@ -0,0 +1,19 @@
|
||||
Param(
|
||||
[string]$sourcesDir,
|
||||
[string]$artifactsDir,
|
||||
[string]$storageKey,
|
||||
[string]$documentDbKey
|
||||
)
|
||||
|
||||
$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
|
||||
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey
|
||||
|
||||
$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
|
||||
$Version = $VersionJson.version
|
||||
$Quality = $VersionJson.quality
|
||||
$CommitId = $VersionJson.commit
|
||||
|
||||
$ZipName = "azuredatastudio-darwin.zip"
|
||||
$Zip = "$artifactsDir\darwin\archive\$ZipName"
|
||||
|
||||
node $sourcesDir\build\azure-pipelines\common\publish.js $Quality darwin archive $ZipName $Version true $Zip $CommitId
|
||||
@@ -1,3 +1,6 @@
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
|
||||
trigger:
|
||||
branches:
|
||||
include: ['master', 'release/*']
|
||||
@@ -8,27 +11,27 @@ pr:
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
versionSpec: "12.13.0"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
inputs:
|
||||
azureSubscription: 'vscode-builds-subscription'
|
||||
KeyVaultName: vscode
|
||||
azureSubscription: 'azuredatastudio-adointegration'
|
||||
KeyVaultName: ado-secrets
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
login azuredatastudio
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
git config user.email "andresse@microsoft.com"
|
||||
git config user.name "AzureDataStudio"
|
||||
|
||||
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
|
||||
# Push master branch into oss/master
|
||||
|
||||
build/azure-pipelines/docker/Dockerfile (new file, 16 lines)
@@ -0,0 +1,16 @@
|
||||
#Download base image ubuntu 16.04
|
||||
FROM ubuntu:16.04
|
||||
|
||||
# Update Software repository
|
||||
RUN apt-get update
|
||||
|
||||
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0
|
||||
|
||||
ADD ./ /opt/ads-server
|
||||
|
||||
RUN chmod +x /opt/ads-server/server.sh && chmod +x /opt/ads-server/node
|
||||
|
||||
CMD ["/opt/ads-server/server.sh"]
|
||||
|
||||
EXPOSE 8000:8000
|
||||
EXPOSE 8001:8001
|
||||
@@ -1,10 +1,13 @@
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
versionSpec: "12.13.0"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
@@ -31,13 +34,3 @@ steps:
|
||||
git push origin HEAD:electron-6.0.x
|
||||
|
||||
displayName: Sync & Merge Exploration
|
||||
|
||||
trigger: none
|
||||
pr: none
|
||||
|
||||
schedules:
|
||||
- cron: "0 5 * * Mon-Fri"
|
||||
displayName: Mon-Fri at 7:00
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
|
||||
build/azure-pipelines/exploration-merge.yml (new file, 39 lines)
@@ -0,0 +1,39 @@
|
||||
trigger:
|
||||
branches:
|
||||
include: ['master']
|
||||
pr: none
|
||||
|
||||
jobs:
|
||||
- job: ExplorationMerge
|
||||
pool:
|
||||
vmImage: Ubuntu-16.04
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine mssqltools.visualstudio.com
|
||||
login azuredatastudio
|
||||
password $(DEVOPS_PASSWORD)
|
||||
EOF
|
||||
|
||||
git config user.email "andresse@microsoft.com"
|
||||
git config user.name "AzureDataStudio"
|
||||
|
||||
git remote add explore "$ADS_EXPLORE_REPO"
|
||||
git fetch explore
|
||||
|
||||
git checkout -b merge-branch explore/master
|
||||
|
||||
git merge origin/master
|
||||
|
||||
git push explore HEAD:master
|
||||
|
||||
displayName: Sync & Merge Explore
|
||||
env:
|
||||
ADS_EXPLORE_REPO: $(ADS_EXPLORE_REPO)
|
||||
DEVOPS_PASSWORD: $(DEVOPS_PASSWORD)
|
||||
build/azure-pipelines/linux/Dockerfile (new file, 27 lines)
@@ -0,0 +1,27 @@
|
||||
#Download base image ubuntu 16.04
|
||||
FROM ubuntu:16.04
|
||||
|
||||
# Update Software repository
|
||||
RUN apt-get update && apt-get upgrade -y
|
||||
|
||||
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
|
||||
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
|
||||
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.8
|
||||
|
||||
RUN rm /usr/bin/gcc
|
||||
RUN rm /usr/bin/g++
|
||||
RUN ln -s /usr/bin/gcc-4.8 /usr/bin/gcc
|
||||
RUN ln -s /usr/bin/g++-4.8 /usr/bin/g++
|
||||
|
||||
#docker
|
||||
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
|
||||
RUN apt-key fingerprint 0EBFCD88
|
||||
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
|
||||
RUN apt-get update
|
||||
RUN apt-get -y install docker-ce docker-ce-cli containerd.io
|
||||
|
||||
# This image needs to be built on a linux host; some weird stuff happens and the xvfb service won't start
|
||||
# if built on a windows host.
|
||||
ADD ./xvfb.init /etc/init.d/xvfb
|
||||
RUN chmod +x /etc/init.d/xvfb
|
||||
RUN update-rc.d xvfb defaults
|
||||
@@ -2,53 +2,59 @@ steps:
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
|
||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev #{{SQL CARBON EDIT}} add kerberos dep
|
||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
sudo chmod +x /etc/init.d/xvfb
|
||||
sudo update-rc.d xvfb defaults
|
||||
sudo service xvfb start
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
versionSpec: "12.13.0"
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
|
||||
inputs:
|
||||
versionSpec: "1.x"
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
inputs:
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
- script: |
|
||||
yarn --frozen-lockfile
|
||||
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
|
||||
displayName: Install Dependencies
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
inputs:
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
|
||||
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
|
||||
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
yarn electron x64
|
||||
displayName: Download Electron
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
- script: | # {{SQL CARBON EDIT}} add strict null check
|
||||
yarn strict-vscode
|
||||
displayName: Run Strict Null Check
|
||||
# - script: | {{SQL CARBON EDIT}} remove monaco editor checks
|
||||
# yarn monaco-compile-check
|
||||
# displayName: Run Monaco Editor Checks
|
||||
- script: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
yarn valid-layers-check
|
||||
displayName: Run Valid Layers Checks
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
- script: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
# - script: | {{SQL CARBON EDIT}} remove step
|
||||
# yarn download-builtin-extensions
|
||||
# displayName: Download Built-in Extensions
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
displayName: Run Integration Tests
|
||||
# - script: | {{SQL CARBON EDIT}} remove step
|
||||
# DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
# displayName: Run Integration Tests
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
|
||||
build/azure-pipelines/linux/createDrop.sh (new executable file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
REPO="$(pwd)"
|
||||
ROOT="$REPO/.."
|
||||
|
||||
# Publish tarball
|
||||
mkdir -p $REPO/.build/linux/{archive,server}
|
||||
PLATFORM_LINUX="linux-x64"
|
||||
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
|
||||
BUILD="$ROOT/$BUILDNAME"
|
||||
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
|
||||
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
|
||||
|
||||
# create version
|
||||
PACKAGEJSON="$BUILD/resources/app/package.json"
|
||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||
COMMIT_ID=$(git rev-parse HEAD)
|
||||
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$COMMIT_ID\" }" > "$REPO/.build/version.json"
|
||||
|
||||
rm -rf $ROOT/code-*.tar.*
|
||||
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||
|
||||
# Publish Remote Extension Host
|
||||
LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
|
||||
SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
|
||||
SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
|
||||
SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
|
||||
|
||||
rm -rf $ROOT/azuredatastudio-server-*.tar.*
|
||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||
|
||||
# create docker
|
||||
mkdir -p $REPO/.build/docker
|
||||
docker build -t azuredatastudio-server -f $REPO/build/azure-pipelines/docker/Dockerfile $ROOT/$SERVER_BUILD_NAME
|
||||
docker save azuredatastudio-server | gzip > $REPO/.build/docker/azuredatastudio-server-docker.tar.gz
|
||||
|
||||
node build/azure-pipelines/common/copyArtifacts.js
|
||||
@@ -1,40 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
function getConfig(quality) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) {
return e(err);
}
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
});
});
}
getConfig(process.argv[2])
.then(config => {
console.log(config.frozen);
process.exit(0);
})
.catch(err => {
console.error(err);
process.exit(1);
});

@@ -21,11 +21,11 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

@@ -21,11 +21,11 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

@@ -105,12 +105,45 @@ steps:
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-x64"
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
yarn gulp "vscode-linux-x64-build-deb"
yarn gulp "vscode-linux-x64-build-rpm"
yarn gulp "vscode-linux-x64-prepare-snap"
displayName: Build packages

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'ESRP CodeSign'
FolderPath: '.build/linux/rpm/x86_64'
Pattern: '*.rpm'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-450779-Pgp",
"operationSetCode": "LinuxSign",
"parameters": [ ],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: Codesign rpm

- script: |
set -e
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \

@@ -10,13 +10,11 @@ BUILD="$ROOT/$BUILDNAME"
BUILD_VERSION="$(date +%s)"
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
PACKAGEJSON="$BUILD/resources/app/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)

node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
node build/azure-pipelines/common/createAsset.js "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$TARBALL_PATH"

# Publish Remote Extension Host
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
@@ -27,32 +25,28 @@ SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)

node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"

# Publish hockeyapp symbols
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "x64" "$VSCODE_HOCKEYAPP_ID_LINUX64"

# Publish DEB
yarn gulp "vscode-linux-x64-build-deb"
PLATFORM_DEB="linux-deb-x64"
DEB_ARCH="amd64"
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"

node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
node build/azure-pipelines/common/createAsset.js "$PLATFORM_DEB" package "$DEB_FILENAME" "$DEB_PATH"

# Publish RPM
yarn gulp "vscode-linux-x64-build-rpm"
PLATFORM_RPM="linux-rpm-x64"
RPM_ARCH="x86_64"
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"

node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
node build/azure-pipelines/common/createAsset.js "$PLATFORM_RPM" package "$RPM_FILENAME" "$RPM_PATH"

# Publish Snap
yarn gulp "vscode-linux-x64-prepare-snap"

# Pack snap tarball artifact, in order to preserve file perms
mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-x64.tar.gz"

@@ -1,11 +1,11 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

@@ -43,12 +43,10 @@ steps:
# Create snap package
BUILD_VERSION="$(date +%s)"
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap --output "$SNAP_PATH")

# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-x64" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
node build/azure-pipelines/common/createAsset.js "linux-snap-x64" package "$SNAP_FILENAME" "$SNAP_PATH"

build/azure-pipelines/linux/sql-product-build-linux.yml (Normal file, 172 lines)
@@ -0,0 +1,172 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'

- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
inputs:
versionSpec: '10.15.1'

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'

- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF

git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling

- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'

- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality

- script: |
set -e
yarn gulp install-sqltoolsservice
yarn gulp install-ssmsmin
displayName: Install extension binaries

- script: |
set -e
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-reh-linux-x64-min-ci
yarn gulp vscode-reh-web-linux-x64-min-ci
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- script: |
set -e
service xvfb start
displayName: Start xvfb
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
yarn gulp package-rebuild-extensions
yarn gulp compile-extensions
yarn gulp package-external-extensions
displayName: Package External extensions

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
DISPLAY=:10 ./scripts/test-extensions-unit.sh
displayName: 'Run Stable Extension Unit Tests'
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
DISPLAY=:10 ./scripts/test-extensions-unit-unstable.sh
displayName: 'Run Unstable Extension Unit Tests'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

- script: |
set -e
yarn gulp vscode-linux-x64-build-deb
displayName: Build Deb

- script: |
set -e
yarn gulp vscode-linux-x64-build-rpm
displayName: Build Rpm

- script: |
set -e
./build/azure-pipelines/linux/createDrop.sh
displayName: Create Drop

- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

- task: PublishCodeCoverageResults@1
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(Build.SourcesDirectory)/.build/coverage/cobertura-coverage.xml'
reportDirectory: '$(Build.SourcesDirectory)/.build/coverage'
continueOnError: true

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true

build/azure-pipelines/linux/sql-publish.ps1 (Normal file, 36 lines)
@@ -0,0 +1,36 @@
Param(
[string]$sourcesDir,
[string]$artifactsDir,
[string]$storageKey,
[string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit
$Arch = "x64"

# Publish tarball
$PlatformLinux = "linux-$Arch"
$TarballFilename = "azuredatastudio-linux-$Arch.tar.gz"
$TarballPath = "$artifactsDir\linux\archive\$TarballFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformLinux archive-unsigned $TarballFilename $Version true $TarballPath $CommitId

# Publish DEB
$PlatformDeb = "linux-deb-$Arch"
$DebFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\deb\amd64\deb\*.deb)"
$DebPath = "$artifactsDir\linux\deb\amd64\deb\$DebFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformDeb package $DebFilename $Version true $DebPath $CommitId

# Publish RPM
$PlatformRpm = "linux-rpm-$Arch"
$RpmFilename = "$(Get-ChildItem -File -Name $artifactsDir\linux\rpm\x86_64\*.rpm)"
$RpmPath = "$artifactsDir\linux\rpm\x86_64\$RpmFilename"

node $sourcesDir\build\azure-pipelines\common\publish.js $Quality $PlatformRpm package $RpmFilename $Version true $RpmPath $CommitId
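The three publish.js invocations above pass the same positional arguments in the same order: quality, platform, asset type, file name, version, a fast-update flag, the artifact path, and the commit id taken from version.json. A hedged Node sketch of reading those positionals back (illustrative only; the real build/azure-pipelines/common/publish.js may name or handle them differently):

// Illustrative only - how the positional arguments used above could be consumed.
const [
  quality,            // e.g. "insider"
  platform,           // e.g. "linux-x64" or "linux-deb-x64"
  assetType,          // "archive-unsigned" or "package"
  fileName,           // e.g. "azuredatastudio-linux-x64.tar.gz"
  version,            // from version.json
  supportsFastUpdate, // "true"
  filePath,           // artifact location on disk
  commitId            // from version.json
] = process.argv.slice(2);

console.log(`publishing ${fileName} (${assetType}) for ${platform} at ${version}/${commitId}`);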
@@ -21,7 +21,7 @@ function main() {
return;
}

const productJsonFilter = filter('product.json', { restore: true });
const productJsonFilter = filter('**/product.json', { restore: true });

fancyLog(ansiColors.blue('[mixin]'), `Mixing in sources:`);
return vfs
@@ -29,7 +29,7 @@ function main() {
.pipe(filter(f => !f.isDirectory()))
.pipe(productJsonFilter)
.pipe(buffer())
.pipe(json(o => Object.assign({}, require('../product.json'), o)))
.pipe(json(o => Object.assign({}, require('../../product.json'), o)))
.pipe(productJsonFilter.restore)
.pipe(es.mapSync(function (f) {
fancyLog(ansiColors.blue('[mixin]'), f.relative, ansiColors.green('✔︎'));
@@ -38,4 +38,4 @@ function main() {
.pipe(vfs.dest('.'));
}

main();
main();

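The mixin step above merges the repo's product.json with the distro's copy via the json() transform. The merge itself is plain Object.assign semantics, as in this minimal sketch (stand-in values, not the real files):

// Illustrative sketch of the merge used in the mixin step above.
// Object.assign({}, base, overrides) keeps every key from base and lets the
// mixed-in product.json override matching keys.
const base = { nameShort: 'azuredatastudio', quality: undefined }; // stand-in for ../../product.json
const mixin = { quality: 'insider' };                              // stand-in for the distro's product.json

const merged = Object.assign({}, base, mixin);
console.log(merged); // { nameShort: 'azuredatastudio', quality: 'insider' }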
@@ -56,7 +56,7 @@ jobs:
- template: linux/snap-build-linux.yml

- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
@@ -67,7 +67,7 @@ jobs:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
@@ -78,7 +78,7 @@ jobs:
- template: linux/product-build-linux-multiarch.yml

- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'), ne(variables['VSCODE_QUALITY'], 'stable'))
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
pool:
vmImage: 'Ubuntu-16.04'
variables:
@@ -102,7 +102,7 @@ jobs:
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
pool:
vmImage: macOS 10.13
vmImage: macOS-latest
dependsOn:
- Compile
steps:
@@ -118,6 +118,7 @@ jobs:
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxArm64
- LinuxAlpine
- macOS
steps:
@@ -133,6 +134,7 @@ jobs:
- Linux
- LinuxSnap
- LinuxArmhf
- LinuxArm64
- LinuxAlpine
- LinuxWeb
- macOS

@@ -12,23 +12,24 @@ steps:
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
dryRun: true

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
versionSpec: "12.13.0"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'vscode-builds-subscription'
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
set -e
@@ -41,7 +42,7 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
set -e
@@ -49,33 +50,33 @@ steps:
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: 'npm-vscode'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))

# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
@@ -83,20 +84,28 @@ steps:
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
set -e
yarn gulp hygiene
yarn monaco-compile-check
displayName: Run hygiene checks
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
yarn valid-layers-check
displayName: Run hygiene, monaco compile & valid layers checks
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
set -e
@@ -106,14 +115,22 @@ steps:
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- script: |
set -e
VERSION=`node -p "require(\"./package.json\").version"`
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/createBuild.js $VERSION
displayName: Create build
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
@@ -122,4 +139,4 @@ steps:
vstsFeed: 'npm-vscode'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))

@@ -1,36 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
return false;
}
return true;
}
@@ -35,9 +35,9 @@ function isValidTag(t: string) {
return false;
}

if (parseInt(major, 10) === NaN || parseInt(minor, 10) === NaN) {
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}

return true;
}
}

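The hunk above swaps a direct comparison against NaN for isNaN(), because NaN never compares equal to anything, itself included, so the old guard could never reject a non-numeric tag segment. A minimal JavaScript illustration:

// Why the change above matters: NaN is never === to anything, so the old check always passed.
console.log(parseInt('abc', 10) === NaN);     // false, even though parseInt returned NaN
console.log(isNaN(parseInt('abc', 10)));      // true - the corrected check
console.log(isNaN(parseInt('1', 10)));        // false for a valid numeric part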
@@ -9,11 +9,27 @@ pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"

- bash: |
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"

if [ "$TAG_VERSION" == "1.999.0" ]; then
MESSAGE="<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local."

curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
-H 'Content-type: application/json; charset=utf-8' \
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
https://slack.com/api/chat.postMessage

exit 1
fi
displayName: Check 1.999.0 tag

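For comparison, the same Slack chat.postMessage call can be made from Node. This is an illustrative sketch only, not part of the pipeline; SLACK_TOKEN and SLACK_CHANNEL are assumed to come from the environment:

// Illustrative Node equivalent of the curl call above (not part of this diff).
const https = require('https');

const body = JSON.stringify({
  channel: process.env.SLACK_CHANNEL,
  link_names: true,
  text: '<!here>. Someone pushed 1.999.0 tag. Please delete it ASAP from remote and local.'
});

const req = https.request('https://slack.com/api/chat.postMessage', {
  method: 'POST',
  headers: {
    'Authorization': `Bearer ${process.env.SLACK_TOKEN}`,
    'Content-type': 'application/json; charset=utf-8'
  }
}, res => res.pipe(process.stdout));

req.end(body);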
- bash: |
# Install build dependencies

@@ -1,62 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath, tag) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function getNewFileContent(content, tag) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return getNewFileHeader(tag) + content.slice(oldheader.length);
}
function getNewFileHeader(tag) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}
@@ -19,4 +19,4 @@ steps:
(cd build ; yarn)

AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/release.js
node build/azure-pipelines/common/releaseBuild.js
build/azure-pipelines/sql-product-build.yml (Normal file, 74 lines)
@@ -0,0 +1,74 @@
resources:
containers:
- container: linux-x64
image: sqltoolscontainers.azurecr.io/linux-build-agent:1
endpoint: ContainerRegistry

jobs:
- job: Compile
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
steps:
- template: sql-product-compile.yml

- job: macOS
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
pool:
vmImage: macOS-latest
dependsOn:
- Compile
steps:
- template: darwin/sql-product-build-darwin.yml
timeoutInMinutes: 180

- job: Linux
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
pool:
vmImage: 'Ubuntu-16.04'
container: linux-x64
dependsOn:
- Compile
steps:
- template: linux/sql-product-build-linux.yml

- job: Windows
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
pool:
vmImage: VS2017-Win2016
dependsOn:
- Compile
steps:
- template: win32/sql-product-build-win32.yml

- job: Windows_Test
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
pool:
name: mssqltools
dependsOn:
- Linux
- Windows
steps:
- template: win32/sql-product-test-win32.yml

- job: Release
condition: and(succeeded(), or(eq(variables['VSCODE_RELEASE'], 'true'), and(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: 'Ubuntu-16.04'
dependsOn:
- macOS
- Linux
- Windows
- Windows_Test
steps:
- template: sql-release.yml

trigger: none
pr: none

schedules:
- cron: "0 5 * * Mon-Fri"
displayName: Mon-Fri at 7:00
branches:
include:
- master

build/azure-pipelines/sql-product-compile.yml (Normal file, 113 lines)
@@ -0,0 +1,113 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF

git config user.email "andresse@microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))

- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))

# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- script: |
set -e
yarn gulp hygiene
yarn strict-vscode
yarn valid-layers-check
displayName: Run hygiene, tslint
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp minify-vscode-reh
yarn gulp minify-vscode-reh-web
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

build/azure-pipelines/sql-release.yml (Normal file, 5 lines)
@@ -0,0 +1,5 @@
steps:
- script: |
set -e
echo "##vso[build.addbuildtag]Release"
displayName: Set For Release
@@ -1,11 +1,11 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

@@ -21,11 +21,11 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

@@ -7,12 +7,9 @@ ROOT="$REPO/.."
WEB_BUILD_NAME="vscode-web"
WEB_TARBALL_FILENAME="vscode-web.tar.gz"
WEB_TARBALL_PATH="$ROOT/$WEB_TARBALL_FILENAME"
BUILD="$ROOT/$WEB_BUILD_NAME"
PACKAGEJSON="$BUILD/package.json"
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")

rm -rf $ROOT/vscode-web.tar.*

(cd $ROOT && tar --owner=0 --group=0 -czf $WEB_TARBALL_PATH $WEB_BUILD_NAME)

node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "web-standalone" archive-unsigned "$WEB_TARBALL_FILENAME" "$VERSION" true "$WEB_TARBALL_PATH"
node build/azure-pipelines/common/createAsset.js web-standalone archive-unsigned "$WEB_TARBALL_FILENAME" "$WEB_TARBALL_PATH"

@@ -1,50 +1,57 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
versionSpec: "12.13.0"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"
- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
- powershell: |
yarn --frozen-lockfile
env:
CHILD_CONCURRENCY: "1"
displayName: Install Dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
vstsFeed: '$(ArtifactFeed)'
keyfile: '.yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'npm-cache' # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
yarn gulp electron
displayName: Download Electron
- powershell: |
yarn electron
- script: |
yarn gulp hygiene
displayName: Run Hygiene Checks
- powershell: |
yarn monaco-compile-check
displayName: Run Monaco Editor Checks
- script: | # {{SQL CARBON EDIT}} add step
yarn strict-vscode
displayName: Run Strict Null Check
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn monaco-compile-check
# displayName: Run Monaco Editor Checks
- script: |
yarn valid-layers-check
displayName: Run Valid Layers Checks
- powershell: |
yarn compile
displayName: Compile Sources
- powershell: |
yarn download-builtin-extensions
displayName: Download Built-in Extensions
# - powershell: | {{SQL CARBON EDIT}} remove step
# yarn download-builtin-extensions
# displayName: Download Built-in Extensions
- powershell: |
.\scripts\test.bat --tfs "Unit Tests"
displayName: Run Unit Tests
- powershell: |
.\scripts\test-integration.bat --tfs "Integration Tests"
displayName: Run Integration Tests
# - powershell: | {{SQL CARBON EDIT}} remove step
# .\scripts\test-integration.bat --tfs "Integration Tests"
# displayName: Run Integration Tests
- task: PublishTestResults@2
displayName: Publish Tests Results
inputs:

build/azure-pipelines/win32/createDrop.ps1 (Normal file, 20 lines)
@@ -0,0 +1,20 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"

$Arch = "x64"

$Repo = "$(pwd)"
$Root = "$Repo\.."
$LegacyServer = "$Root\azuredatastudio-reh-win32-$Arch"
$ServerName = "azuredatastudio-server-win32-$Arch"
$Server = "$Root\$ServerName"
$ServerZipLocation = "$Repo\.build\win32-$Arch\server"
$ServerZip = "$ServerZipLocation\azuredatastudio-server-win32-$Arch.zip"

# Create server archive
New-Item $ServerZipLocation -ItemType Directory # this will throw even when success for we don't want to exec this
$global:LASTEXITCODE = 0
exec { Rename-Item -Path $LegacyServer -NewName $ServerName } "Rename Item"
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r } "Zip Server"

exec { node build/azure-pipelines/common/copyArtifacts.js } "Copy Artifacts"
@@ -21,11 +21,11 @@ steps:

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"
versionSpec: "12.13.0"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.10.1"
versionSpec: "1.x"

- task: UsePythonVersion@0
inputs:
@@ -107,16 +107,21 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

- powershell: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the integration tests
# to run with these builds instead of running out of sources.
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "electron-$(VSCODE_ARCH)" }
exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
$AppRoot = "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))

@@ -23,14 +23,13 @@ exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }
# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"

$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }

exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Zip }
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $SystemExe }
exec { node build/azure-pipelines/common/createAsset.js "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $UserExe }
exec { node build/azure-pipelines/common/createAsset.js "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $ServerZip }

# publish hockeyapp symbols
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
build/azure-pipelines/win32/sql-product-build-win32.yml (Normal file, 270 lines)
@@ -0,0 +1,270 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'

- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))

- task: NodeTool@0
inputs:
versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"

- task: UsePythonVersion@0
inputs:
versionSpec: '2.x'
addToPath: true

- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
"machine github.com`nlogin azuredatastudio`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII

exec { git config user.email "andresse@microsoft.com" }
exec { git config user.name "AzureDataStudio" }
displayName: Prepare tooling

- powershell: |
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro

- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/mixin }
displayName: Mix in quality

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "install-sqltoolsservice" }
displayName: Install sqltoolsservice

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "package-rebuild-extensions" }
exec { yarn gulp "vscode-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-win32-x64-min-ci" }
exec { yarn gulp "vscode-reh-web-win32-x64-min-ci" }
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { .\scripts\test-unstable.bat --build --coverage --reporter mocha-junit-reporter }
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
displayName: Run unstable tests

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'Sign out code'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(agent.builddirectory)/azuredatastudio-win32-x64'
Pattern: '*.exe,*.node,resources/app/node_modules.asar.unpacked/*.dll,swiftshader/*.dll,d3dcompiler_47.dll,libGLESv2.dll,ffmpeg.dll,libEGL.dll,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,Microsoft.SqlServer.*.dll,Microsoft.Data.Tools.Sql.BatchParser.dll'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Azure Data Studio"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://github.com/microsoft/azuredatastudio"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd sha256"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
}
]
SessionTimeout: 600
MaxConcurrency: 5
MaxRetryAttempts: 20
condition: and(succeeded(), eq(variables['signed'], true))

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-x64-user-setup" }
exec { yarn gulp "vscode-win32-x64-system-setup" }
exec { yarn gulp "vscode-win32-x64-archive" }
displayName: Archive & User & System setup

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
displayName: 'Sign installers'
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '.build'
Pattern: '*.exe'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolSign",
"parameters": [
{
"parameterName": "OpusName",
"parameterValue": "Azure Data Studio"
},
{
"parameterName": "OpusInfo",
"parameterValue": "https://github.com/microsoft/azuredatastudio"
},
{
"parameterName": "PageHash",
"parameterValue": "/NPH"
},
{
"parameterName": "FileDigest",
"parameterValue": "/fd sha256"
},
{
"parameterName": "TimeStamp",
"parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
},
{
"keyCode": "CP-230012",
"operationSetCode": "SigntoolVerify",
"parameters": [
{
"parameterName": "VerifyAll",
"parameterValue": "/all"
}
],
"toolName": "signtool.exe",
"toolVersion": "6.2.9304.0"
}
]
SessionTimeout: 600
MaxConcurrency: 5
MaxRetryAttempts: 20
condition: and(succeeded(), eq(variables['signed'], true))

- task: ArchiveFiles@2
displayName: 'Archive build scripts source'
inputs:
rootFolderOrFile: '$(Build.SourcesDirectory)/build'
archiveType: tar
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'

- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: build scripts source'
inputs:
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
ArtifactName: source

- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
.\build\azure-pipelines\win32\createDrop.ps1
displayName: Create Drop

- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'

- task: PublishTestResults@2
displayName: 'Publish Test Results test-results.xml'
inputs:
testResultsFiles: 'test-results.xml'
searchFolder: '$(Build.SourcesDirectory)'
failTaskOnFailedTests: true
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true
build/azure-pipelines/win32/sql-product-test-win32.yml (new file, 99 lines)
@@ -0,0 +1,99 @@
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
  inputs:
    versionSpec: "1.x"

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:CHILD_CONCURRENCY="1"
    exec { git clean -fxd }
  displayName: Clean repo

- task: DownloadPipelineArtifact@2
  inputs:
    buildType: 'current'
    targetPath: '$(Build.SourcesDirectory)\.build'
    artifactName: drop

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:CHILD_CONCURRENCY="1"
    exec { yarn --frozen-lockfile }
  displayName: Install dependencies

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { .\node_modules\7zip\7zip-lite\7z.exe x $(Build.SourcesDirectory)\.build\win32-x64/archive/azuredatastudio-win32-x64.zip -o$(Agent.TempDirectory)\azuredatastudio-win32-x64 }
  displayName: Unzip artifact

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: SqlToolsSecretStore'
  inputs:
    azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
    KeyVaultName: SqlToolsSecretStore
    SecretsFilter: 'ads-integration-test-azure-server,ads-integration-test-azure-server-password,ads-integration-test-azure-server-username,ads-integration-test-bdc-server,ads-integration-test-bdc-server-password,ads-integration-test-bdc-server-username,ads-integration-test-standalone-server,ads-integration-test-standalone-server-password,ads-integration-test-standalone-server-username,ads-integration-test-standalone-server-2019,ads-integration-test-standalone-server-password-2019,ads-integration-test-standalone-server-username-2019'

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
    $AppNameShort = $AppProductJson.nameShort
    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"; .\scripts\sql-test-integration.bat }
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
  displayName: Run stable tests
  env:
    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
    STANDALONE_SQL: $(ads-integration-test-standalone-server)
    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
    AZURE_SQL: $(ads-integration-test-azure-server)
    STANDALONE_SQL_USERNAME_2019: $(ads-integration-test-standalone-server-username-2019)
    STANDALONE_SQL_PWD_2019: $(ads-integration-test-standalone-server-password-2019)
    STANDALONE_SQL_2019: $(ads-integration-test-standalone-server-2019)

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $AppRoot = "$(Agent.TempDirectory)\azuredatastudio-win32-x64"
    $AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
    $AppNameShort = $AppProductJson.nameShort
    exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; .\scripts\sql-test-integration-unstable.bat }
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_UNSTABLE_TESTS'], 'true'))
  displayName: Run unstable integration tests
  env:
    BDC_BACKEND_USERNAME: $(ads-integration-test-bdc-server-username)
    BDC_BACKEND_PWD: $(ads-integration-test-bdc-server-password)
    BDC_BACKEND_HOSTNAME: $(ads-integration-test-bdc-server)
    STANDALONE_SQL_USERNAME: $(ads-integration-test-standalone-server-username)
    STANDALONE_SQL_PWD: $(ads-integration-test-standalone-server-password)
    STANDALONE_SQL: $(ads-integration-test-standalone-server)
    AZURE_SQL_USERNAME: $(ads-integration-test-azure-server-username)
    AZURE_SQL_PWD: $(ads-integration-test-azure-server-password)
    AZURE_SQL: $(ads-integration-test-azure-server)
    STANDALONE_SQL_USERNAME_2019: $(ads-integration-test-standalone-server-username-2019)
    STANDALONE_SQL_PWD_2019: $(ads-integration-test-standalone-server-password-2019)
    STANDALONE_SQL_2019: $(ads-integration-test-standalone-server-2019)

- task: PublishTestResults@2
  displayName: 'Publish Integration and Smoke Test Results'
  inputs:
    testResultsFiles: '*.xml'
    searchFolder: '$(Build.ArtifactStagingDirectory)\test-results'
    mergeTestResults: true
    failTaskOnFailedTests: true
  continueOnError: true
  condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
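The integration-test steps in this file derive the executable name from product.json and hand it to the sql-test-integration scripts through environment variables. A rough local equivalent of the stable run would be the following sketch; the extracted build path and SQL server values are placeholders, not real configuration.

# Illustrative local equivalent of the "Run stable tests" step; paths and credentials are placeholders.
$AppRoot = "C:\temp\azuredatastudio-win32-x64"
$AppProductJson = Get-Content -Raw -Path "$AppRoot\resources\app\product.json" | ConvertFrom-Json
$AppNameShort = $AppProductJson.nameShort
$env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"
$env:INTEGRATION_TEST_CLI_PATH = "$AppRoot\bin\$AppNameShort"
$env:STANDALONE_SQL = "localhost"       # placeholder test server
$env:STANDALONE_SQL_USERNAME = "sa"     # placeholder
$env:STANDALONE_SQL_PWD = "<password>"  # placeholder
.\scripts\sql-test-integration.bat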
build/azure-pipelines/win32/sql-publish.ps1 (new file, 29 lines)
@@ -0,0 +1,29 @@
Param(
    [string]$sourcesDir,
    [string]$artifactsDir,
    [string]$storageKey,
    [string]$documentDbKey
)

$env:AZURE_STORAGE_ACCESS_KEY_2 = $storageKey
$env:AZURE_DOCUMENTDB_MASTERKEY = $documentDbKey

$ExeName = "AzureDataStudioSetup.exe"
$SystemExe = "$artifactsDir\win32-x64\system-setup\$ExeName"
$UserExe = "$artifactsDir\win32-x64\user-setup\$ExeName"
$UserExeName = "AzureDataStudioUserSetup.exe"
$ZipName = "azuredatastudio-win32-x64.zip"
$Zip = "$artifactsDir\win32-x64\archive\$ZipName"

$VersionJson = Get-Content -Raw -Path "$artifactsDir\version.json" | ConvertFrom-Json
$Version = $VersionJson.version
$Quality = $VersionJson.quality
$CommitId = $VersionJson.commit

$assetPlatform = "win32-x64"

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-archive" archive $ZipName $Version true $Zip $CommitId

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform" setup $ExeName $Version true $SystemExe $CommitId

node $sourcesDir/build/azure-pipelines/common/publish.js $Quality "$assetPlatform-user" setup $UserExeName $Version true $UserExe $CommitId
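sql-publish.ps1 takes the repo root, the artifacts folder, and two secrets, reads version.json, and forwards each asset (archive, system setup, user setup) to build/azure-pipelines/common/publish.js. The pipeline step that calls it is not part of this diff; an invocation implied by the parameter block might look like the following sketch, where the directory and secret variable names are placeholders.

# Hypothetical invocation; the real publish step and variable names may differ.
.\build\azure-pipelines\win32\sql-publish.ps1 `
    -sourcesDir $env:BUILD_SOURCESDIRECTORY `
    -artifactsDir "$env:BUILD_SOURCESDIRECTORY\.build" `
    -storageKey $env:AZURE_STORAGE_ACCESS_KEY `
    -documentDbKey $env:AZURE_DOCUMENTDB_MASTERKEY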
@@ -1,7 +0,0 @@
[
  {
    "name": "Microsoft.sqlservernotebook",
    "version": "0.2.1",
    "repo": "https://github.com/Microsoft/azuredatastudio"
  }
]
@@ -1,2 +1,7 @@
[
  {
    "name": "Microsoft.sqlservernotebook",
    "version": "0.3.4",
    "repo": "https://github.com/Microsoft/azuredatastudio"
  }
]
|
||||
@@ -10,6 +10,7 @@ const os = require('os');
|
||||
const { remote } = require('electron');
|
||||
const dialog = remote.dialog;
|
||||
|
||||
const productJsonPath = path.join(__dirname, '..', '..', 'product.json');
|
||||
const builtInExtensionsPath = path.join(__dirname, '..', 'builtInExtensions.json');
|
||||
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
|
||||
|
||||
@@ -51,6 +52,7 @@ function render(el, state) {
|
||||
}
|
||||
|
||||
const ul = document.createElement('ul');
|
||||
const { quality } = readJson(productJsonPath);
|
||||
const { builtin, control } = state;
|
||||
|
||||
for (const ext of builtin) {
|
||||
@@ -61,6 +63,10 @@ function render(el, state) {
|
||||
|
||||
const name = document.createElement('code');
|
||||
name.textContent = ext.name;
|
||||
if (quality && ext.forQualities && !ext.forQualities.includes(quality)) {
|
||||
name.textContent += ` (only on ${ext.forQualities.join(', ')})`;
|
||||
}
|
||||
|
||||
li.appendChild(name);
|
||||
|
||||
const form = document.createElement('form');
|
||||
@@ -123,4 +129,4 @@ function main() {
|
||||
render(el, { builtin, control });
|
||||
}
|
||||
|
||||
window.onload = main;
|
||||
window.onload = main;
|
||||
|
||||
@@ -17,14 +17,14 @@ const compilation = require('./lib/compilation');
|
||||
const monacoapi = require('./monaco/api');
|
||||
const fs = require('fs');
|
||||
|
||||
var root = path.dirname(__dirname);
|
||||
var sha1 = util.getVersion(root);
|
||||
var semver = require('./monaco/package.json').version;
|
||||
var headerVersion = semver + '(' + sha1 + ')';
|
||||
let root = path.dirname(__dirname);
|
||||
let sha1 = util.getVersion(root);
|
||||
let semver = require('./monaco/package.json').version;
|
||||
let headerVersion = semver + '(' + sha1 + ')';
|
||||
|
||||
// Build
|
||||
|
||||
var editorEntryPoints = [
|
||||
let editorEntryPoints = [
|
||||
{
|
||||
name: 'vs/editor/editor.main',
|
||||
include: [],
|
||||
@@ -40,16 +40,11 @@ var editorEntryPoints = [
|
||||
}
|
||||
];
|
||||
|
||||
var editorResources = [
|
||||
'out-build/vs/{base,editor}/**/*.{svg,png}',
|
||||
'!out-build/vs/base/browser/ui/splitview/**/*',
|
||||
'!out-build/vs/base/browser/ui/toolbar/**/*',
|
||||
'!out-build/vs/base/browser/ui/octiconLabel/**/*',
|
||||
'!out-build/vs/workbench/**',
|
||||
'!**/test/**'
|
||||
let editorResources = [
|
||||
'out-editor-build/vs/base/browser/ui/codiconLabel/**/*.ttf'
|
||||
];
|
||||
|
||||
var BUNDLED_FILE_HEADER = [
|
||||
let BUNDLED_FILE_HEADER = [
|
||||
'/*!-----------------------------------------------------------',
|
||||
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||
' * Version: ' + headerVersion,
|
||||
@@ -62,7 +57,6 @@ var BUNDLED_FILE_HEADER = [
|
||||
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||
|
||||
const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
||||
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
|
||||
const apiusages = monacoapi.execute().usageContent;
|
||||
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
|
||||
standalone.extractEditor({
|
||||
@@ -76,25 +70,15 @@ const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
||||
apiusages,
|
||||
extrausages
|
||||
],
|
||||
typings: [
|
||||
'typings/lib.ie11_safe_es6.d.ts',
|
||||
'typings/thenable.d.ts',
|
||||
'typings/es6-promise.d.ts',
|
||||
'typings/require-monaco.d.ts',
|
||||
"typings/lib.es2018.promise.d.ts",
|
||||
'vs/monaco.d.ts'
|
||||
],
|
||||
libs: [
|
||||
`lib.es5.d.ts`,
|
||||
`lib.dom.d.ts`,
|
||||
`lib.webworker.importscripts.d.ts`
|
||||
],
|
||||
redirects: {
|
||||
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
||||
},
|
||||
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
|
||||
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
|
||||
destRoot: path.join(root, 'out-editor-src')
|
||||
destRoot: path.join(root, 'out-editor-src'),
|
||||
redirects: []
|
||||
});
|
||||
});
|
||||
|
||||
@@ -145,23 +129,75 @@ const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () =>
|
||||
});
|
||||
|
||||
const compileEditorESMTask = task.define('compile-editor-esm', () => {
|
||||
console.log(`Launching the TS compiler at ${path.join(__dirname, '../out-editor-esm')}...`);
|
||||
let result;
|
||||
if (process.platform === 'win32') {
|
||||
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
|
||||
result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
|
||||
cwd: path.join(__dirname, '../out-editor-esm')
|
||||
});
|
||||
console.log(result.stdout.toString());
|
||||
console.log(result.stderr.toString());
|
||||
} else {
|
||||
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
|
||||
result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
|
||||
cwd: path.join(__dirname, '../out-editor-esm')
|
||||
});
|
||||
console.log(result.stdout.toString());
|
||||
console.log(result.stderr.toString());
|
||||
}
|
||||
|
||||
console.log(result.stdout.toString());
|
||||
console.log(result.stderr.toString());
|
||||
|
||||
if (result.status !== 0) {
|
||||
console.log(`The TS Compilation failed, preparing analysis folder...`);
|
||||
const destPath = path.join(__dirname, '../../vscode-monaco-editor-esm-analysis');
|
||||
return util.rimraf(destPath)().then(() => {
|
||||
fs.mkdirSync(destPath);
|
||||
|
||||
// initialize a new repository
|
||||
cp.spawnSync(`git`, [`init`], {
|
||||
cwd: destPath
|
||||
});
|
||||
|
||||
// build a list of files to copy
|
||||
const files = util.rreddir(path.join(__dirname, '../out-editor-esm'));
|
||||
|
||||
// copy files from src
|
||||
for (const file of files) {
|
||||
const srcFilePath = path.join(__dirname, '../src', file);
|
||||
const dstFilePath = path.join(destPath, file);
|
||||
if (fs.existsSync(srcFilePath)) {
|
||||
util.ensureDir(path.dirname(dstFilePath));
|
||||
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
|
||||
fs.writeFileSync(dstFilePath, contents);
|
||||
}
|
||||
}
|
||||
|
||||
// create an initial commit to diff against
|
||||
cp.spawnSync(`git`, [`add`, `.`], {
|
||||
cwd: destPath
|
||||
});
|
||||
|
||||
// create the commit
|
||||
cp.spawnSync(`git`, [`commit`, `-m`, `"original sources"`, `--no-gpg-sign`], {
|
||||
cwd: destPath
|
||||
});
|
||||
|
||||
// copy files from esm
|
||||
for (const file of files) {
|
||||
const srcFilePath = path.join(__dirname, '../out-editor-esm', file);
|
||||
const dstFilePath = path.join(destPath, file);
|
||||
if (fs.existsSync(srcFilePath)) {
|
||||
util.ensureDir(path.dirname(dstFilePath));
|
||||
const contents = fs.readFileSync(srcFilePath).toString().replace(/\r\n|\r|\n/g, '\n');
|
||||
fs.writeFileSync(dstFilePath, contents);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Open in VS Code the folder at '${destPath}' and you can analyze the compilation error`);
|
||||
throw new Error('Standalone Editor compilation failed. If this is the build machine, simply launch `yarn run gulp editor-distro` on your machine to further analyze the compilation problem.');
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
function toExternalDTS(contents) {
|
||||
let lines = contents.split('\n');
|
||||
let lines = contents.split(/\r\n|\r|\n/);
|
||||
let killNextCloseCurlyBrace = false;
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i];
|
||||
@@ -227,7 +263,7 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
||||
// package.json
|
||||
gulp.src('build/monaco/package.json')
|
||||
.pipe(es.through(function (data) {
|
||||
var json = JSON.parse(data.contents.toString());
|
||||
let json = JSON.parse(data.contents.toString());
|
||||
json.private = false;
|
||||
data.contents = Buffer.from(JSON.stringify(json, null, ' '));
|
||||
this.emit('data', data);
|
||||
@@ -271,10 +307,10 @@ const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
||||
return;
|
||||
}
|
||||
|
||||
var relativePathToMap = path.relative(path.join(data.relative), path.join('min-maps', data.relative + '.map'));
|
||||
let relativePathToMap = path.relative(path.join(data.relative), path.join('min-maps', data.relative + '.map'));
|
||||
|
||||
var strContents = data.contents.toString();
|
||||
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
|
||||
let strContents = data.contents.toString();
|
||||
let newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
|
||||
strContents = strContents.replace(/\/\/# sourceMappingURL=[^ ]+$/, newStr);
|
||||
|
||||
data.contents = Buffer.from(strContents);
|
||||
@@ -317,6 +353,13 @@ gulp.task('editor-distro',
|
||||
)
|
||||
);
|
||||
|
||||
gulp.task('monacodts', task.define('monacodts', () => {
|
||||
const result = monacoapi.execute();
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
fs.writeFileSync(path.join(root, 'src/vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
|
||||
return Promise.resolve(true);
|
||||
}));
|
||||
|
||||
//#region monaco type checking
|
||||
|
||||
function createTscCompileTask(watch) {
|
||||
|
||||
@@ -21,10 +21,15 @@ const nlsDev = require('vscode-nls-dev');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
const plumber = require('gulp-plumber');
|
||||
const _ = require('underscore');
|
||||
const ext = require('./lib/extensions');
|
||||
|
||||
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
||||
// {{SQL CARBON EDIT}}
|
||||
const sqlLocalizedExtensions = [
|
||||
'dacpac',
|
||||
'schema-compare'
|
||||
];
|
||||
// {{SQL CARBON EDIT}}
|
||||
|
||||
const compilations = glob.sync('**/tsconfig.json', {
|
||||
cwd: extensionsPath,
|
||||
@@ -37,38 +42,38 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
const absolutePath = path.join(extensionsPath, tsconfigFile);
|
||||
const relativeDirname = path.dirname(tsconfigFile);
|
||||
|
||||
const tsconfig = require(absolutePath);
|
||||
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
|
||||
tsOptions.verbose = false;
|
||||
tsOptions.sourceMap = true;
|
||||
const overrideOptions = {};
|
||||
overrideOptions.sourceMap = true;
|
||||
|
||||
const name = relativeDirname.replace(/\//g, '-');
|
||||
|
||||
const root = path.join('extensions', relativeDirname);
|
||||
const srcBase = path.join(root, 'src');
|
||||
const src = path.join(srcBase, '**');
|
||||
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
|
||||
|
||||
const out = path.join(root, 'out');
|
||||
const baseUrl = getBaseUrl(out);
|
||||
|
||||
let headerId, headerOut;
|
||||
let index = relativeDirname.indexOf('/');
|
||||
if (index < 0) {
|
||||
headerId = 'vscode.' + relativeDirname;
|
||||
headerId = 'microsoft.' + relativeDirname; // {{SQL CARBON EDIT}}
|
||||
headerOut = 'out';
|
||||
} else {
|
||||
headerId = 'vscode.' + relativeDirname.substr(0, index);
|
||||
headerId = 'microsoft.' + relativeDirname.substr(0, index); // {{SQL CARBON EDIT}}
|
||||
headerOut = relativeDirname.substr(index + 1) + '/out';
|
||||
}
|
||||
|
||||
function createPipeline(build, emitError) {
|
||||
const reporter = createReporter();
|
||||
|
||||
tsOptions.inlineSources = !!build;
|
||||
tsOptions.base = path.dirname(absolutePath);
|
||||
overrideOptions.inlineSources = Boolean(build);
|
||||
overrideOptions.base = path.dirname(absolutePath);
|
||||
|
||||
const compilation = tsb.create(tsOptions, null, null, err => reporter(err.toString()));
|
||||
const compilation = tsb.create(absolutePath, overrideOptions, false, err => reporter(err.toString()));
|
||||
|
||||
return function () {
|
||||
const pipeline = function () {
|
||||
const input = es.through();
|
||||
const tsFilter = filter(['**/*.ts', '!**/lib/lib*.d.ts', '!**/node_modules/**'], { restore: true });
|
||||
const output = input
|
||||
@@ -98,15 +103,20 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
|
||||
return es.duplex(input, output);
|
||||
};
|
||||
}
|
||||
|
||||
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
|
||||
// add src-stream for project files
|
||||
pipeline.tsProjectSrc = () => {
|
||||
return compilation.src(srcOpts);
|
||||
};
|
||||
return pipeline;
|
||||
}
|
||||
|
||||
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
|
||||
|
||||
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
|
||||
const pipeline = createPipeline(false, true);
|
||||
const input = gulp.src(src, srcOpts);
|
||||
const pipeline = createPipeline(sqlLocalizedExtensions.includes(name), true); // {{SQL CARBON EDIT}}
|
||||
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
|
||||
const input = es.merge(nonts, pipeline.tsProjectSrc());
|
||||
|
||||
return input
|
||||
.pipe(pipeline())
|
||||
@@ -115,8 +125,9 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
|
||||
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
|
||||
const pipeline = createPipeline(false);
|
||||
const input = gulp.src(src, srcOpts);
|
||||
const watchInput = watcher(src, srcOpts);
|
||||
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
|
||||
const input = es.merge(nonts, pipeline.tsProjectSrc());
|
||||
const watchInput = watcher(src, { ...srcOpts, ...{ readDelay: 200 } });
|
||||
|
||||
return watchInput
|
||||
.pipe(util.incremental(pipeline, input))
|
||||
@@ -125,7 +136,8 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
|
||||
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
|
||||
const pipeline = createPipeline(true, true);
|
||||
const input = gulp.src(src, srcOpts);
|
||||
const nonts = gulp.src(src, srcOpts).pipe(filter(['**', '!**/*.ts']));
|
||||
const input = es.merge(nonts, pipeline.tsProjectSrc());
|
||||
|
||||
return input
|
||||
.pipe(pipeline())
|
||||
@@ -156,8 +168,8 @@ const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimr
|
||||
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
|
||||
cleanExtensionsBuildTask,
|
||||
task.define('bundle-extensions-build', () => ext.packageLocalExtensionsStream().pipe(gulp.dest('.build'))),
|
||||
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build'))),
|
||||
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream().pipe(gulp.dest('.build')))
|
||||
));
|
||||
|
||||
gulp.task(compileExtensionsBuildTask);
|
||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
||||
|
||||
@@ -8,15 +8,14 @@
|
||||
const gulp = require('gulp');
|
||||
const filter = require('gulp-filter');
|
||||
const es = require('event-stream');
|
||||
const gulptslint = require('gulp-tslint');
|
||||
const gulpeslint = require('gulp-eslint');
|
||||
const tsfmt = require('typescript-formatter');
|
||||
const tslint = require('tslint');
|
||||
const VinylFile = require('vinyl');
|
||||
const vfs = require('vinyl-fs');
|
||||
const path = require('path');
|
||||
const fs = require('fs');
|
||||
const pall = require('p-all');
|
||||
const task = require('./lib/task');
|
||||
|
||||
/**
|
||||
* Hygiene works by creating cascading subsets of all our files and
|
||||
@@ -55,8 +54,10 @@ const indentationFilter = [
|
||||
'!src/vs/base/node/terminateProcess.sh',
|
||||
'!src/vs/base/node/cpuUsage.sh',
|
||||
'!test/assert.js',
|
||||
'!build/testSetup.js',
|
||||
|
||||
// except specific folders
|
||||
'!test/automation/out/**',
|
||||
'!test/smoke/out/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace/**',
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
@@ -65,11 +66,12 @@ const indentationFilter = [
|
||||
|
||||
// except multiple specific files
|
||||
'!**/package.json',
|
||||
'!**/package-lock.json', // {{SQL CARBON EDIT}}
|
||||
'!**/yarn.lock',
|
||||
'!**/yarn-error.log',
|
||||
|
||||
// except multiple specific folders
|
||||
'!**/octicons/**',
|
||||
'!**/codicon/**',
|
||||
'!**/fixtures/**',
|
||||
'!**/lib/**',
|
||||
'!extensions/**/out/**',
|
||||
@@ -83,7 +85,7 @@ const indentationFilter = [
|
||||
'!src/typings/**/*.d.ts',
|
||||
'!extensions/**/*.d.ts',
|
||||
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}',
|
||||
'!build/{lib,tslintRules,download}/**/*.js',
|
||||
'!build/{lib,download}/**/*.js',
|
||||
'!build/**/*.sh',
|
||||
'!build/azure-pipelines/**/*.js',
|
||||
'!build/azure-pipelines/**/*.config',
|
||||
@@ -99,7 +101,9 @@ const indentationFilter = [
|
||||
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
||||
'!extensions/resource-deployment/notebooks/**',
|
||||
'!extensions/mssql/notebooks/**',
|
||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
|
||||
'!extensions/integration-tests/testData/**',
|
||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts',
|
||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/clusterApiGenerated2.ts'
|
||||
];
|
||||
|
||||
const copyrightFilter = [
|
||||
@@ -120,6 +124,7 @@ const copyrightFilter = [
|
||||
'!**/*.opts',
|
||||
'!**/*.disabled',
|
||||
'!**/*.code-workspace',
|
||||
'!**/*.js.map',
|
||||
'!**/promise-polyfill/polyfill.js',
|
||||
'!build/**/*.init',
|
||||
'!resources/linux/snap/snapcraft.yaml',
|
||||
@@ -130,41 +135,43 @@ const copyrightFilter = [
|
||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||
'!extensions/*/server/bin/*',
|
||||
'!src/vs/editor/test/node/classification/typescript-test.ts',
|
||||
'!scripts/code-web.js',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'!extensions/notebook/src/intellisense/text.ts',
|
||||
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/outputs/tableRenderers.ts',
|
||||
'!src/sql/workbench/parts/notebook/common/models/url.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/models/renderMimeInterfaces.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/models/outputProcessor.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/models/mimemodel.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/cellViews/media/*.css',
|
||||
'!extensions/mssql/src/hdfs/webhdfs.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/outputs/tableRenderers.ts',
|
||||
'!src/sql/workbench/contrib/notebook/common/models/url.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/models/renderMimeInterfaces.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/models/outputProcessor.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/models/mimemodel.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/cellViews/media/*.css',
|
||||
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/outputs/sanitizer.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/outputs/renderers.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/outputs/registry.ts',
|
||||
'!src/sql/workbench/parts/notebook/browser/outputs/factories.ts',
|
||||
'!src/sql/workbench/parts/notebook/common/models/nbformat.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/outputs/sanitizer.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/outputs/renderers.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/outputs/registry.ts',
|
||||
'!src/sql/workbench/contrib/notebook/browser/outputs/factories.ts',
|
||||
'!src/sql/workbench/contrib/notebook/common/models/nbformat.ts',
|
||||
'!extensions/markdown-language-features/media/tomorrow.css',
|
||||
'!src/sql/workbench/browser/modelComponents/media/highlight.css',
|
||||
'!src/sql/workbench/parts/notebook/electron-browser/cellViews/media/highlight.css',
|
||||
'!src/sql/workbench/contrib/notebook/electron-browser/cellViews/media/highlight.css',
|
||||
'!extensions/mssql/sqltoolsservice/**',
|
||||
'!extensions/import/flatfileimportservice/**',
|
||||
'!extensions/notebook/src/prompts/**',
|
||||
'!extensions/mssql/src/prompts/**',
|
||||
'!extensions/notebook/resources/jupyter_config/**',
|
||||
'!extensions/query-history/images/**',
|
||||
'!**/*.gif',
|
||||
'!**/*.xlf',
|
||||
'!**/*.dacpac',
|
||||
'!**/*.bacpac'
|
||||
];
|
||||
|
||||
const eslintFilter = [
|
||||
const jsHygieneFilter = [
|
||||
'src/**/*.js',
|
||||
'build/gulpfile.*.js',
|
||||
'!src/vs/loader.js',
|
||||
@@ -177,7 +184,10 @@ const eslintFilter = [
|
||||
'!**/test/**'
|
||||
];
|
||||
|
||||
const tslintBaseFilter = [
|
||||
const tsHygieneFilter = [
|
||||
'src/**/*.ts',
|
||||
'test/**/*.ts',
|
||||
'extensions/**/*.ts',
|
||||
'!**/fixtures/**',
|
||||
'!**/typings/**',
|
||||
'!**/node_modules/**',
|
||||
@@ -186,41 +196,17 @@ const tslintBaseFilter = [
|
||||
'!extensions/vscode-api-tests/testWorkspace2/**',
|
||||
'!extensions/**/*.test.ts',
|
||||
'!extensions/html-language-features/server/lib/jquery.d.ts',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts'
|
||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/apiGenerated.ts', // {{SQL CARBON EDIT}},
|
||||
'!extensions/big-data-cluster/src/bigDataCluster/controller/tokenApiGenerated.ts', // {{SQL CARBON EDIT}},
|
||||
'!src/vs/workbench/services/themes/common/textMateScopeMatcher.ts', // {{SQL CARBON EDIT}} skip this because we have no plans on touching this and its not ours
|
||||
'!src/vs/workbench/contrib/extensions/browser/extensionTipsService.ts' // {{SQL CARBON EDIT}} skip this because known issue
|
||||
];
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const useStrictFilter = [
|
||||
'src/**'
|
||||
];
|
||||
|
||||
const sqlFilter = [
|
||||
'src/sql/**'
|
||||
];
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
|
||||
const tslintCoreFilter = [
|
||||
'src/**/*.ts',
|
||||
'test/**/*.ts',
|
||||
'!extensions/**/*.ts',
|
||||
'!test/smoke/**',
|
||||
...tslintBaseFilter
|
||||
];
|
||||
|
||||
const tslintExtensionsFilter = [
|
||||
const sqlHygieneFilter = [ // for rules we want to only apply to our code
|
||||
'src/sql/**/*.ts',
|
||||
'!**/node_modules/**',
|
||||
'extensions/**/*.ts',
|
||||
'!src/**/*.ts',
|
||||
'!test/**/*.ts',
|
||||
...tslintBaseFilter
|
||||
];
|
||||
|
||||
const tslintHygieneFilter = [
|
||||
'src/**/*.ts',
|
||||
'test/**/*.ts',
|
||||
'extensions/**/*.ts',
|
||||
...tslintBaseFilter
|
||||
'!extensions/{git,search-result,vscode-test-resolver,extension-editing,json-language-features,vscode-colorize-tests}/**/*.ts',
|
||||
];
|
||||
|
||||
const copyrightHeaderLines = [
|
||||
@@ -232,28 +218,45 @@ const copyrightHeaderLines = [
|
||||
|
||||
gulp.task('eslint', () => {
|
||||
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
|
||||
.pipe(filter(eslintFilter))
|
||||
.pipe(gulpeslint('src/.eslintrc'))
|
||||
.pipe(filter(jsHygieneFilter.concat(tsHygieneFilter)))
|
||||
.pipe(gulpeslint({
|
||||
configFile: '.eslintrc.json',
|
||||
rulePaths: ['./build/lib/eslint']
|
||||
}))
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(gulpeslint.failAfterError());
|
||||
.pipe(gulpeslint.results(results => {
|
||||
if (results.warningCount > 0 || results.errorCount > 0) {
|
||||
throw new Error('eslint failed with warnings and/or errors');
|
||||
}
|
||||
}));
|
||||
});
|
||||
|
||||
gulp.task('tslint', () => {
|
||||
return es.merge([
|
||||
function checkPackageJSON(actualPath) {
|
||||
const actual = require(path.join(__dirname, '..', actualPath));
|
||||
const rootPackageJSON = require('../package.json');
|
||||
|
||||
// Core: include type information (required by certain rules like no-nodejs-globals)
|
||||
vfs.src(all, { base: '.', follow: true, allowEmpty: true })
|
||||
.pipe(filter(tslintCoreFilter))
|
||||
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint', program: tslint.Linter.createProgram('src/tsconfig.json') }))
|
||||
.pipe(gulptslint.default.report({ emitError: true })),
|
||||
for (let depName in actual.dependencies) {
|
||||
const depVersion = actual.dependencies[depName];
|
||||
const rootDepVersion = rootPackageJSON.dependencies[depName];
|
||||
if (!rootDepVersion) {
|
||||
// missing in root is allowed
|
||||
continue;
|
||||
}
|
||||
if (depVersion !== rootDepVersion) {
|
||||
this.emit('error', `The dependency ${depName} in '${actualPath}' (${depVersion}) is different than in the root package.json (${rootDepVersion})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extensions: do not include type information
|
||||
vfs.src(all, { base: '.', follow: true, allowEmpty: true })
|
||||
.pipe(filter(tslintExtensionsFilter))
|
||||
.pipe(gulptslint.default({ rulesDirectory: 'build/lib/tslint' }))
|
||||
.pipe(gulptslint.default.report({ emitError: true }))
|
||||
]).pipe(es.through());
|
||||
const checkPackageJSONTask = task.define('check-package-json', () => {
|
||||
return gulp.src('package.json')
|
||||
.pipe(es.through(function () {
|
||||
checkPackageJSON.call(this, 'remote/package.json');
|
||||
checkPackageJSON.call(this, 'remote/web/package.json');
|
||||
}));
|
||||
});
|
||||
gulp.task(checkPackageJSONTask);
|
||||
|
||||
|
||||
function hygiene(some) {
|
||||
let errorCount = 0;
|
||||
@@ -304,23 +307,6 @@ function hygiene(some) {
|
||||
this.emit('data', file);
|
||||
});
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
|
||||
const useStrict = es.through(function (file) {
|
||||
const lines = file.__lines;
|
||||
// Only take the first 10 lines to reduce false positives- the compiler will throw an error if it's not the first non-comment line in a file
|
||||
// (10 is used to account for copyright and extraneous newlines)
|
||||
lines.slice(0, 10).forEach((line, i) => {
|
||||
if (/\s*'use\s*strict\s*'/.test(line)) {
|
||||
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
|
||||
errorCount++;
|
||||
}
|
||||
});
|
||||
|
||||
this.emit('data', file);
|
||||
});
|
||||
// {{SQL CARBON EDIT}} END
|
||||
|
||||
const formatting = es.map(function (file, cb) {
|
||||
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
||||
verify: false,
|
||||
@@ -331,8 +317,6 @@ function hygiene(some) {
|
||||
replace: undefined,
|
||||
tsconfig: undefined,
|
||||
tsconfigFile: undefined,
|
||||
tslint: undefined,
|
||||
tslintFile: undefined,
|
||||
tsfmtFile: undefined,
|
||||
vscode: undefined,
|
||||
vscodeFile: undefined
|
||||
@@ -341,7 +325,7 @@ function hygiene(some) {
|
||||
let formatted = result.dest.replace(/\r\n/gm, '\n');
|
||||
|
||||
if (original !== formatted) {
|
||||
console.error("File not formatted. Run the 'Format Document' command to fix it:", file.relative);
|
||||
console.error('File not formatted. Run the \'Format Document\' command to fix it:', file.relative);
|
||||
errorCount++;
|
||||
}
|
||||
cb(null, file);
|
||||
@@ -351,35 +335,19 @@ function hygiene(some) {
|
||||
});
|
||||
});
|
||||
|
||||
const tslintConfiguration = tslint.Configuration.findConfiguration('tslint.json', '.');
|
||||
const tslintOptions = { fix: false, formatter: 'json' };
|
||||
const tsLinter = new tslint.Linter(tslintOptions);
|
||||
|
||||
const tsl = es.through(function (file) {
|
||||
const contents = file.contents.toString('utf8');
|
||||
tsLinter.lint(file.relative, contents, tslintConfiguration.results);
|
||||
this.emit('data', file);
|
||||
});
|
||||
|
||||
let input;
|
||||
|
||||
if (Array.isArray(some) || typeof some === 'string' || !some) {
|
||||
input = vfs.src(some || all, { base: '.', follow: true, allowEmpty: true });
|
||||
const options = { base: '.', follow: true, allowEmpty: true };
|
||||
if (some) {
|
||||
input = vfs.src(some, options).pipe(filter(all)); // split this up to not unnecessarily filter all a second time
|
||||
} else {
|
||||
input = vfs.src(all, options);
|
||||
}
|
||||
} else {
|
||||
input = some;
|
||||
}
|
||||
|
||||
const tslintSqlConfiguration = tslint.Configuration.findConfiguration('tslint-sql.json', '.');
|
||||
const tslintSqlOptions = { fix: false, formatter: 'json' };
|
||||
const sqlTsLinter = new tslint.Linter(tslintSqlOptions);
|
||||
|
||||
const sqlTsl = es.through(function (file) {
|
||||
const contents = file.contents.toString('utf8');
|
||||
sqlTsLinter.lint(file.relative, contents, tslintSqlConfiguration.results);
|
||||
|
||||
this.emit('data', file);
|
||||
});
|
||||
|
||||
const productJsonFilter = filter('product.json', { restore: true });
|
||||
|
||||
const result = input
|
||||
@@ -393,23 +361,35 @@ function hygiene(some) {
|
||||
.pipe(copyrights);
|
||||
|
||||
const typescript = result
|
||||
.pipe(filter(tslintHygieneFilter))
|
||||
.pipe(formatting)
|
||||
.pipe(tsl)
|
||||
// {{SQL CARBON EDIT}}
|
||||
.pipe(filter(useStrictFilter))
|
||||
.pipe(useStrict)
|
||||
.pipe(filter(sqlFilter))
|
||||
.pipe(sqlTsl);
|
||||
.pipe(filter(tsHygieneFilter))
|
||||
.pipe(formatting);
|
||||
|
||||
const javascript = result
|
||||
.pipe(filter(eslintFilter))
|
||||
.pipe(gulpeslint('src/.eslintrc'))
|
||||
.pipe(filter(jsHygieneFilter.concat(tsHygieneFilter)))
|
||||
.pipe(gulpeslint({
|
||||
configFile: '.eslintrc.json',
|
||||
rulePaths: ['./build/lib/eslint']
|
||||
}))
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(gulpeslint.failAfterError());
|
||||
.pipe(gulpeslint.results(results => {
|
||||
errorCount += results.warningCount;
|
||||
errorCount += results.errorCount;
|
||||
}));
|
||||
|
||||
const sqlJavascript = result
|
||||
.pipe(filter(sqlHygieneFilter))
|
||||
.pipe(gulpeslint({
|
||||
configFile: '.eslintrc.sql.json',
|
||||
rulePaths: ['./build/lib/eslint']
|
||||
}))
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(gulpeslint.results(results => {
|
||||
errorCount += results.warningCount;
|
||||
errorCount += results.errorCount;
|
||||
}));
|
||||
|
||||
let count = 0;
|
||||
return es.merge(typescript, javascript)
|
||||
return es.merge(typescript, javascript, sqlJavascript)
|
||||
.pipe(es.through(function (data) {
|
||||
count++;
|
||||
if (process.env['TRAVIS'] && count % 10 === 0) {
|
||||
@@ -418,33 +398,6 @@ function hygiene(some) {
|
||||
this.emit('data', data);
|
||||
}, function () {
|
||||
process.stdout.write('\n');
|
||||
|
||||
const tslintResult = tsLinter.getResult();
|
||||
if (tslintResult.failures.length > 0) {
|
||||
for (const failure of tslintResult.failures) {
|
||||
const name = failure.getFileName();
|
||||
const position = failure.getStartPosition();
|
||||
const line = position.getLineAndCharacter().line;
|
||||
const character = position.getLineAndCharacter().character;
|
||||
|
||||
console.error(`${name}:${line + 1}:${character + 1}:${failure.getFailure()}`);
|
||||
}
|
||||
errorCount += tslintResult.failures.length;
|
||||
}
|
||||
|
||||
const sqlTslintResult = sqlTsLinter.getResult();
|
||||
if (sqlTslintResult.failures.length > 0) {
|
||||
for (const failure of sqlTslintResult.failures) {
|
||||
const name = failure.getFileName();
|
||||
const position = failure.getStartPosition();
|
||||
const line = position.getLineAndCharacter().line;
|
||||
const character = position.getLineAndCharacter().character;
|
||||
|
||||
console.error(`${name}:${line + 1}:${character + 1}:${failure.getFailure()}`);
|
||||
}
|
||||
errorCount += sqlTslintResult.failures.length;
|
||||
}
|
||||
|
||||
if (errorCount > 0) {
|
||||
this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
|
||||
} else {
|
||||
@@ -486,7 +439,7 @@ function createGitIndexVinyls(paths) {
|
||||
.then(r => r.filter(p => !!p));
|
||||
}
|
||||
|
||||
gulp.task('hygiene', () => hygiene());
|
||||
gulp.task('hygiene', task.series(checkPackageJSONTask, () => hygiene()));
|
||||
|
||||
// this allows us to run hygiene as a git pre-commit hook
|
||||
if (require.main === module) {
|
||||
|
||||
@@ -1,79 +0,0 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const gulp = require('gulp');
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const jeditor = require('gulp-json-editor');
|
||||
const product = require('../product.json');
|
||||
|
||||
gulp.task('mixin', function () {
|
||||
// {{SQL CARBON EDIT}}
|
||||
const updateUrl = process.env['SQLOPS_UPDATEURL'];
|
||||
if (!updateUrl) {
|
||||
console.log('Missing SQLOPS_UPDATEURL, skipping mixin');
|
||||
return;
|
||||
}
|
||||
|
||||
const quality = process.env['VSCODE_QUALITY'];
|
||||
|
||||
if (!quality) {
|
||||
console.log('Missing VSCODE_QUALITY, skipping mixin');
|
||||
return;
|
||||
}
|
||||
|
||||
// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
|
||||
let newValues = {
|
||||
"nameShort": product.nameShort,
|
||||
"nameLong": product.nameLong,
|
||||
"applicationName": product.applicationName,
|
||||
"dataFolderName": product.dataFolderName,
|
||||
"win32MutexName": product.win32MutexName,
|
||||
"win32DirName": product.win32DirName,
|
||||
"win32NameVersion": product.win32NameVersion,
|
||||
"win32RegValueName": product.win32RegValueName,
|
||||
"win32AppId": product.win32AppId,
|
||||
"win32x64AppId": product.win32x64AppId,
|
||||
"win32UserAppId": product.win32UserAppId,
|
||||
"win32x64UserAppId": product.win32x64UserAppId,
|
||||
"win32AppUserModelId": product.win32AppUserModelId,
|
||||
"win32ShellNameShort": product.win32ShellNameShort,
|
||||
"darwinBundleIdentifier": product.darwinBundleIdentifier,
|
||||
"updateUrl": updateUrl,
|
||||
"quality": quality,
|
||||
"extensionsGallery": {
|
||||
"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
|
||||
}
|
||||
};
|
||||
|
||||
if (quality === 'insider') {
|
||||
let dashSuffix = '-insiders';
|
||||
let dotSuffix = '.insiders';
|
||||
let displaySuffix = ' - Insiders';
|
||||
|
||||
newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
|
||||
newValues.nameShort += dashSuffix;
|
||||
newValues.nameLong += displaySuffix;
|
||||
newValues.applicationName += dashSuffix;
|
||||
newValues.dataFolderName += dashSuffix;
|
||||
newValues.win32MutexName += dashSuffix;
|
||||
newValues.win32DirName += displaySuffix;
|
||||
newValues.win32NameVersion += displaySuffix;
|
||||
newValues.win32RegValueName += dashSuffix;
|
||||
newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
|
||||
newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
|
||||
newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
|
||||
newValues.win32x64UserAppId += dashSuffix;
|
||||
newValues.win32AppUserModelId += dotSuffix;
|
||||
newValues.win32ShellNameShort += displaySuffix;
|
||||
newValues.darwinBundleIdentifier += dotSuffix;
|
||||
}
|
||||
|
||||
return gulp.src('./product.json')
|
||||
.pipe(jeditor(newValues))
|
||||
.pipe(gulp.dest('.'));
|
||||
});
|
||||
@@ -118,7 +118,7 @@ function mixinServer(watch) {
|
||||
const packageJSONPath = path.join(path.dirname(__dirname), 'package.json');
|
||||
function exec(cmdLine) {
|
||||
console.log(cmdLine);
|
||||
cp.execSync(cmdLine, { stdio: "inherit" });
|
||||
cp.execSync(cmdLine, { stdio: 'inherit' });
|
||||
}
|
||||
function checkout() {
|
||||
const packageJSON = JSON.parse(fs.readFileSync(packageJSONPath).toString());
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const gulp = require('gulp');
|
||||
const util = require('./lib/util');
|
||||
const tsfmt = require('typescript-formatter');
|
||||
@@ -12,8 +11,13 @@ const es = require('event-stream');
|
||||
const filter = require('gulp-filter');
|
||||
const del = require('del');
|
||||
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
|
||||
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
|
||||
const platform = require('service-downloader/out/platform').PlatformInformation;
|
||||
const path = require('path');
|
||||
const ext = require('./lib/extensions');
|
||||
const task = require('./lib/task');
|
||||
const glob = require('glob');
|
||||
const vsce = require('vsce');
|
||||
const mkdirp = require('mkdirp');
|
||||
|
||||
gulp.task('clean-mssql-extension', util.rimraf('extensions/mssql/node_modules'));
|
||||
gulp.task('clean-credentials-extension', util.rimraf('extensions/credentials/node_modules'));
|
||||
@@ -22,77 +26,83 @@ gulp.task('fmt', () => formatStagedFiles());
|
||||
const formatFiles = (some) => {
|
||||
const formatting = es.map(function (file, cb) {
|
||||
|
||||
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
||||
replace: true,
|
||||
tsfmt: true,
|
||||
tslint: true,
|
||||
tsconfig: true
|
||||
// verbose: true
|
||||
}).then(result => {
|
||||
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
|
||||
if (result.error) {
|
||||
console.error(result.message);
|
||||
}
|
||||
cb(null, file);
|
||||
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
||||
replace: true,
|
||||
tsfmt: true,
|
||||
tslint: true,
|
||||
tsconfig: true
|
||||
// verbose: true
|
||||
}).then(result => {
|
||||
console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
|
||||
if (result.error) {
|
||||
console.error(result.message);
|
||||
}
|
||||
cb(null, file);
|
||||
|
||||
}, err => {
|
||||
cb(err);
|
||||
});
|
||||
}, err => {
|
||||
cb(err);
|
||||
});
|
||||
return gulp.src(some, { base: '.' })
|
||||
.pipe(filter(f => !f.stat.isDirectory()))
|
||||
.pipe(formatting);
|
||||
});
|
||||
return gulp.src(some, {
|
||||
base: '.'
|
||||
})
|
||||
.pipe(filter(f => !f.stat.isDirectory()))
|
||||
.pipe(formatting);
|
||||
|
||||
};
|
||||
|
||||
const formatStagedFiles = () => {
|
||||
const cp = require('child_process');
|
||||
cp.exec('git diff --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
|
||||
if (err) {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
cp.exec('git diff --name-only', {
|
||||
maxBuffer: 2000 * 1024
|
||||
}, (err, out) => {
|
||||
if (err) {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const some = out
|
||||
.split(/\r?\n/)
|
||||
.filter(l => !!l)
|
||||
.filter(l => l.match(/.*.ts$/i));
|
||||
const some = out
|
||||
.split(/\r?\n/)
|
||||
.filter(l => !!l)
|
||||
.filter(l => l.match(/.*.ts$/i));
|
||||
|
||||
formatFiles(some).on('error', err => {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
formatFiles(some).on('error', err => {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
|
||||
cp.exec('git diff --cached --name-only', { maxBuffer: 2000 * 1024 }, (err, out) => {
|
||||
if (err) {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
cp.exec('git diff --cached --name-only', {
|
||||
maxBuffer: 2000 * 1024
|
||||
}, (err, out) => {
|
||||
if (err) {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
const some = out
|
||||
.split(/\r?\n/)
|
||||
.filter(l => !!l)
|
||||
.filter(l => l.match(/.*.ts$/i));
|
||||
const some = out
|
||||
.split(/\r?\n/)
|
||||
.filter(l => !!l)
|
||||
.filter(l => l.match(/.*.ts$/i));
|
||||
|
||||
formatFiles(some).on('error', err => {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
formatFiles(some).on('error', err => {
|
||||
console.error();
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
function installService() {
|
||||
let config = require('../extensions/mssql/src/config.json');
|
||||
return platformInfo.getCurrent().then(p => {
|
||||
let config = require('../extensions/mssql/config.json');
|
||||
return platform.getCurrent().then(p => {
|
||||
let runtime = p.runtimeId;
|
||||
// fix path since it won't be correct
|
||||
config.installDirectory = path.join(__dirname, '../extensions/mssql/src', config.installDirectory);
|
||||
var installer = new serviceDownloader(config);
|
||||
let installer = new serviceDownloader(config);
|
||||
let serviceInstallFolder = installer.getInstallDirectory(runtime);
|
||||
console.log('Cleaning up the install folder: ' + serviceInstallFolder);
|
||||
return del(serviceInstallFolder + '/*').then(() => {
|
||||
@@ -108,25 +118,50 @@ gulp.task('install-sqltoolsservice', () => {
|
||||
return installService();
|
||||
});
|
||||
|
||||
function installSsmsMin() {
|
||||
const config = require('../extensions/admin-tool-ext-win/src/config.json');
|
||||
return platformInfo.getCurrent().then(p => {
|
||||
const runtime = p.runtimeId;
|
||||
// fix path since it won't be correct
|
||||
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
|
||||
var installer = new serviceDownloader(config);
|
||||
const serviceInstallFolder = installer.getInstallDirectory(runtime);
|
||||
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
|
||||
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
|
||||
return del(serviceCleanupFolder + '/*').then(() => {
|
||||
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
|
||||
return installer.installService(runtime);
|
||||
}, delError => {
|
||||
console.log('failed to delete the install folder error: ' + delError);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
gulp.task('install-ssmsmin', () => {
|
||||
return installSsmsMin();
|
||||
const config = require('../extensions/admin-tool-ext-win/config.json');
|
||||
const runtime = 'Windows_64'; // admin-tool-ext is a windows only extension, and we only ship a 64 bit version, so locking the binaries as such
|
||||
// fix path since it won't be correct
|
||||
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
|
||||
let installer = new serviceDownloader(config);
|
||||
const serviceInstallFolder = installer.getInstallDirectory(runtime);
|
||||
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
|
||||
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
|
||||
return del(serviceCleanupFolder + '/*').then(() => {
|
||||
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
|
||||
return installer.installService(runtime);
|
||||
}, delError => {
|
||||
console.log('failed to delete the install folder error: ' + delError);
|
||||
});
|
||||
});
|
||||
|
||||
const root = path.dirname(__dirname);
|
||||
|
||||
gulp.task('package-external-extensions', task.series(
|
||||
task.define('bundle-external-extensions-build', () => ext.packageExternalExtensionsStream().pipe(gulp.dest('.build/external'))),
|
||||
task.define('create-external-extension-vsix-build', () => {
|
||||
const vsixes = glob.sync('.build/external/extensions/*/package.json').map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
}).map(element => {
|
||||
const pkgJson = require(path.join(element.path, 'package.json'));
|
||||
const vsixDirectory = path.join(root, '.build', 'extensions');
|
||||
mkdirp.sync(vsixDirectory);
|
||||
const packagePath = path.join(vsixDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||
return vsce.createVSIX({
|
||||
cwd: element.path,
|
||||
packagePath: packagePath,
|
||||
useYarn: true
|
||||
});
|
||||
});
|
||||
|
||||
return Promise.all(vsixes);
|
||||
})
|
||||
));
|
||||
|
||||
gulp.task('package-rebuild-extensions', task.series(
|
||||
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
|
||||
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
|
||||
));
|
||||
|
||||
@@ -29,9 +29,8 @@ const packageJson = require('../package.json');
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const ext = require('./lib/extensions'); // {{SQL CARBON EDIT}}
const deps = require('./dependencies');
const getElectronVersion = require('./lib/electron').getElectronVersion;
const { config } = require('./lib/electron');
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
@@ -44,6 +43,7 @@ const nodeModules = [
'electron',
'original-fs',
'rxjs/Observable',
'rxjs/add/observable/fromPromise',
'rxjs/Subject',
'rxjs/Observer',
'slickgrid/lib/jquery.event.drag-2.3.0',
@@ -60,8 +60,7 @@ const nodeModules = [
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
buildfile.base,
buildfile.serviceWorker,
buildfile.workbench,
buildfile.workbenchDesktop,
buildfile.code
]);

@@ -79,7 +78,7 @@ const vscodeResources = [
'out-build/vs/base/common/performance.js',
'out-build/vs/base/node/languagePacks.js',
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/base/browser/ui/codiconLabel/codicon/**',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
@@ -87,15 +86,13 @@ const vscodeResources = [
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
'out-build/vs/**/markdown.css',
'out-build/vs/workbench/contrib/tasks/**/*.json',
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
'out-build/vs/platform/files/**/*.exe',
'out-build/vs/platform/files/**/*.md',
'out-build/vs/code/electron-browser/workbench/**',
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
'out-build/vs/code/electron-browser/issue/issueReporter.js',
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
// {{SQL CARBON EDIT}}
'out-build/sql/workbench/electron-browser/splashscreen/*',
'out-build/sql/workbench/electron-browser/splashscreen/*', // {{SQL CARBON EDIT}} STart
'out-build/sql/**/*.{svg,png,cur,html}',
'out-build/sql/base/browser/ui/table/media/*.{gif,png,svg}',
'out-build/sql/base/browser/ui/checkbox/media/*.{gif,png,svg}',
@@ -113,7 +110,8 @@ const vscodeResources = [
'out-build/sql/media/objectTypes/*.svg',
'out-build/sql/media/icons/*.svg',
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
'out-build/sql/setup.js',
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
'out-build/vs/platform/auth/common/auth.css',
'!**/test/**'
];

@@ -125,6 +123,7 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
resources: vscodeResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode',
inlineAmdImages: true,
bundleInfo: undefined
})
));
@@ -144,73 +143,6 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
));
gulp.task(minifyVSCodeTask);

// Package

// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));

function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}

const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,

// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || undefined
};

function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});

return gulp.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}

gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));

/**
* Compute checksums for some files.
*
@@ -219,9 +151,9 @@ gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron
* @return {Object} A map of paths to checksums.
*/
function computeChecksums(out, filenames) {
var result = {};
let result = {};
filenames.forEach(function (filename) {
var fullPath = path.join(process.cwd(), out, filename);
let fullPath = path.join(process.cwd(), out, filename);
result[filename] = computeChecksum(fullPath);
});
return result;
@@ -234,9 +166,9 @@ function computeChecksums(out, filenames) {
* @return {string} The checksum for `filename`.
*/
function computeChecksum(filename) {
var contents = fs.readFileSync(filename);
let contents = fs.readFileSync(filename);

var hash = crypto
let hash = crypto
.createHash('md5')
.update(contents)
.digest('base64')
@@ -265,10 +197,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
.pipe(util.setExecutableBit(['**/*.sh']));

// {{SQL CARBON EDIT}}
ext.packageBuiltInExtensions();

const extensions = gulp.src('.build/extensions/**', { base: '.build', dot: true });
const extensions = gulp.src(['.build/extensions/**', '!.build/extensions/node_modules/**'], { base: '.build', dot: true }); // {{SQL CARBON EDIT}} - don't package the node_modules directory

const sources = es.merge(src, extensions)
.pipe(filter(['**', '!**/*.js.map'], { dot: true }));
@@ -308,7 +237,6 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
// {{SQL CARBON EDIT}}
const dataApi = gulp.src('src/sql/azdata.d.ts').pipe(rename('out/sql/azdata.d.ts'));
const sqlopsAPI = gulp.src('src/sql/sqlops.d.ts').pipe(rename('out/sql/sqlops.d.ts'));

const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true });

@@ -325,8 +253,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
productJsonStream,
license,
api,
dataApi,
sqlopsAPI, // {{SQL CARBON EDIT}}
dataApi, // {{SQL CARBON EDIT}}
telemetry,
sources,
deps
@@ -468,7 +395,7 @@ gulp.task(task.define(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = '.build/extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';

return es.merge(
@@ -489,7 +416,7 @@ gulp.task(task.define(
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
const pathToExtensions = './extensions/*';
const pathToExtensions = '.build/extensions/*';
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';

return es.merge(

@@ -23,7 +23,7 @@ const commit = util.getVersion(root);
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);

function getDebPackageArch(arch) {
return { x64: 'amd64', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'amd64', arm: 'armhf', arm64: 'arm64' }[arch];
}

function prepareDebPackage(arch) {
@@ -43,7 +43,8 @@ function prepareDebPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));

const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
@@ -117,7 +118,7 @@ function getRpmBuildPath(rpmArch) {
}

function getRpmPackageArch(arch) {
return { x64: 'x86_64', arm: 'armhf', arm64: "arm64" }[arch];
return { x64: 'x86_64', arm: 'armhf', arm64: 'arm64' }[arch];
}

function prepareRpmPackage(arch) {
@@ -136,6 +137,7 @@ function prepareRpmPackage(arch) {
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@EXEC@@', `/usr/share/${product.applicationName}/${product.applicationName}`))
.pipe(replace('@@ICON@@', product.linuxIconName))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));

@@ -206,21 +208,25 @@ function prepareSnapPackage(arch) {
const destination = getSnapBuildPath(arch);

return function () {
// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
.pipe(rename(`snap/gui/${product.applicationName}.desktop`));

// A desktop file that is placed in snap/gui will be placed into meta/gui verbatim.
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
.pipe(rename(`snap/gui/${product.applicationName}-url-handler.desktop`));

const desktops = es.merge(desktop, desktopUrlHandler)
.pipe(replace('@@NAME_LONG@@', product.nameLong))
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
.pipe(replace('@@EXEC@@', `${product.applicationName} --force-user-env`))
.pipe(replace('@@ICON@@', `\${SNAP}/meta/gui/${product.linuxIconName}.png`))
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));

// An icon that is placed in snap/gui will be placed into meta/gui verbatim.
const icon = gulp.src('resources/linux/code.png', { base: '.' })
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
.pipe(rename(`snap/gui/${product.linuxIconName}.png`));

const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
@@ -241,7 +247,8 @@ function prepareSnapPackage(arch) {

function buildSnapPackage(arch) {
const snapBuildPath = getSnapBuildPath(arch);
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
// Default target for snapcraft runs: pull, build, stage and prime, and finally assembles the snap.
return shell.task(`cd ${snapBuildPath} && snapcraft`);
}

const BUILD_TARGETS = [
@@ -6,150 +6,11 @@
'use strict';

const gulp = require('gulp');
const path = require('path');
const es = require('event-stream');
const util = require('./lib/util');
const task = require('./lib/task');
const common = require('./lib/optimize');
const product = require('../product.json');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const json = require('gulp-json-editor');
const _ = require('underscore');
const deps = require('./dependencies');
const vfs = require('vinyl-fs');
const packageJson = require('../package.json');
const { compileBuildTask } = require('./gulpfile.compile');

const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);
const BUILD_ROOT = path.dirname(REPO_ROOT);
const WEB_FOLDER = path.join(REPO_ROOT, 'remote', 'web');
const noop = () => { return Promise.resolve(); };

const productionDependencies = deps.getProductionDependencies(WEB_FOLDER);

const nodeModules = Object.keys(product.dependencies || {})
.concat(_.uniq(productionDependencies.map(d => d.name)));

const vscodeWebResources = [

// Workbench
'out-build/vs/{base,platform,editor,workbench}/**/*.{svg,png,html}',
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
'out-build/vs/**/markdown.css',

// Webview
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',

// Extension Worker
'out-build/vs/workbench/services/extensions/worker/extensionHostWorkerMain.js',

// Excludes
'!out-build/vs/**/{node,electron-browser,electron-main}/**',
'!out-build/vs/editor/standalone/**',
'!out-build/vs/workbench/**/*-tb.png',
'!**/test/**'
];

const buildfile = require('../src/buildfile');

const vscodeWebEntryPoints = [
buildfile.workbenchWeb,
buildfile.serviceWorker,
buildfile.workerExtensionHost,
buildfile.keyboardMaps,
buildfile.base
];

const optimizeVSCodeWebTask = task.define('optimize-vscode-web', task.series(
util.rimraf('out-vscode-web'),
common.optimizeTask({
src: 'out-build',
entryPoints: _.flatten(vscodeWebEntryPoints),
otherSources: [],
resources: vscodeWebResources,
loaderConfig: common.loaderConfig(nodeModules),
out: 'out-vscode-web',
bundleInfo: undefined
})
));

const minifyVSCodeWebTask = task.define('minify-vscode-web', task.series(
optimizeVSCodeWebTask,
util.rimraf('out-vscode-web-min'),
common.minifyTask('out-vscode-web', `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`)
));
gulp.task(minifyVSCodeWebTask);

function packageTask(sourceFolderName, destinationFolderName) {
const destination = path.join(BUILD_ROOT, destinationFolderName);

return () => {
const src = gulp.src(sourceFolderName + '/**', { base: '.' })
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + sourceFolderName), 'out'); }))
.pipe(filter(['**', '!**/*.js.map']));

const sources = es.merge(src);

let version = packageJson.version;
const quality = product.quality;

if (quality && quality !== 'stable') {
version += '-' + quality;
}

const name = product.nameShort;
const packageJsonStream = gulp.src(['remote/web/package.json'], { base: 'remote/web' })
.pipe(json({ name, version }));

const date = new Date().toISOString();

const productJsonStream = gulp.src(['product.json'], { base: '.' })
.pipe(json({ commit, date }));

const license = gulp.src(['remote/LICENSE'], { base: 'remote' });

const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(REPO_ROOT, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`, `!${d}/.bin/**`]));

const deps = gulp.src(dependenciesSrc, { base: 'remote/web', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')));

const favicon = gulp.src('resources/server/favicon.ico', { base: 'resources/server' });

let all = es.merge(
packageJsonStream,
productJsonStream,
license,
sources,
deps,
favicon
);

let result = all
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions());

return result.pipe(vfs.dest(destination));
};
}

const dashed = (str) => (str ? `-${str}` : ``);

['', 'min'].forEach(minified => {
const sourceFolderName = `out-vscode-web${dashed(minified)}`;
const destinationFolderName = `vscode-web`;

const vscodeWebTaskCI = task.define(`vscode-web${dashed(minified)}-ci`, task.series(
minified ? minifyVSCodeWebTask : optimizeVSCodeWebTask,
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(sourceFolderName, destinationFolderName)
));
gulp.task(vscodeWebTaskCI);

const vscodeWebTask = task.define(`vscode-web${dashed(minified)}`, task.series(
compileBuildTask,
vscodeWebTaskCI
));
gulp.task(vscodeWebTask);
});
gulp.task('minify-vscode-web', noop);
gulp.task('vscode-web', noop);
gulp.task('vscode-web-min', noop);
gulp.task('vscode-web-ci', noop);
gulp.task('vscode-web-min-ci', noop);
@@ -23,7 +23,7 @@ const repoPath = path.dirname(__dirname);
// {{SQL CARBON EDIT}}
const buildPath = arch => path.join(path.dirname(repoPath), `azuredatastudio-win32-${arch}`);
const zipDir = arch => path.join(repoPath, '.build', `win32-${arch}`, 'archive');
const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
const zipPath = arch => path.join(zipDir(arch), `azuredatastudio-win32-${arch}.zip`);
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
const issPath = path.join(__dirname, 'win32', 'code.iss');
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup'))), 'bin', 'ISCC.exe');

@@ -18,10 +18,15 @@ const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');

const root = path.dirname(path.dirname(__dirname));
// {{SQL CARBON EDIT}}
const builtInExtensions = require('../builtInExtensions-insiders.json');
// {{SQL CARBON EDIT}} - END
const builtInExtensions = require('../builtInExtensions.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
const ENABLE_LOGGING = !process.env['VSCODE_BUILD_BUILTIN_EXTENSIONS_SILENCE_PLEASE'];

function log() {
if (ENABLE_LOGGING) {
fancyLog.apply(this, arguments);
}
}

function getExtensionPath(extension) {
return path.join(root, '.build', 'builtInExtensions', extension.name);
@@ -46,7 +51,7 @@ function isUpToDate(extension) {

function syncMarketplaceExtension(extension) {
if (isUpToDate(extension)) {
fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
log(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
return es.readArray([]);
}

@@ -55,13 +60,13 @@ function syncMarketplaceExtension(extension) {
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
.on('end', () => log(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
}

function syncExtension(extension, controlState) {
switch (controlState) {
case 'disabled':
fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
log(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
return es.readArray([]);

case 'marketplace':
@@ -69,15 +74,15 @@ function syncExtension(extension, controlState) {

default:
if (!fs.existsSync(controlState)) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
return es.readArray([]);

} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
log(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
return es.readArray([]);
}

fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
log(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
return es.readArray([]);
}
}
@@ -96,8 +101,8 @@ function writeControlFile(control) {
}

function main() {
fancyLog('Syncronizing built-in extensions...');
fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);
log('Syncronizing built-in extensions...');
log(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);

const control = readControlFile();
const streams = [];
@@ -11,7 +11,6 @@ const bom = require("gulp-bom");
const sourcemaps = require("gulp-sourcemaps");
const tsb = require("gulp-tsb");
const path = require("path");
const _ = require("underscore");
const monacodts = require("../monaco/api");
const nls = require("./nls");
const reporter_1 = require("./reporter");
@@ -22,14 +21,7 @@ const watch = require('./watch');
const reporter = reporter_1.createReporter();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options;
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
}
else {
options = tsconfig.compilerOptions;
}
let options = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -38,49 +30,46 @@ function getTypeScriptCompilerOptions(src) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
return options;
}
function createCompile(src, build, emitError) {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
return function (token) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = Object.assign(Object.assign({}, getTypeScriptCompilerOptions(src)), { inlineSources: Boolean(build) });
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));
function pipeline(token) {
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
const input = es.through();
const output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(bom()) // this is required to preserve BOM in test files that loose it otherwise
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(compilation(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: overrideOptions.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));
return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}
const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];
function compileTask(src, out, build) {
return function () {
const compile = createCompile(src, build, true);
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -95,8 +84,8 @@ exports.compileTask = compileTask;
function watchTask(out, build) {
return function () {
const compile = createCompile('src', build);
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
const watchSrc = watch('src/**', { base: 'src' });
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });
let generator = new MonacoGenerator(true);
generator.execute();
return watchSrc
@@ -12,27 +12,21 @@ import * as bom from 'gulp-bom';
import * as sourcemaps from 'gulp-sourcemaps';
import * as tsb from 'gulp-tsb';
import * as path from 'path';
import * as _ from 'underscore';
import * as monacodts from '../monaco/api';
import * as nls from './nls';
import { createReporter } from './reporter';
import * as util from './util';
import * as fancyLog from 'fancy-log';
import * as ansiColors from 'ansi-colors';
import ts = require('typescript');

const watch = require('./watch');

const reporter = createReporter();

function getTypeScriptCompilerOptions(src: string) {
function getTypeScriptCompilerOptions(src: string): ts.CompilerOptions {
const rootDir = path.join(__dirname, `../../${src}`);
const tsconfig = require(`../../${src}/tsconfig.json`);
let options: { [key: string]: any };
if (tsconfig.extends) {
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
} else {
options = tsconfig.compilerOptions;
}
let options: ts.CompilerOptions = {};
options.verbose = false;
options.sourceMap = true;
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
@@ -41,18 +35,17 @@ function getTypeScriptCompilerOptions(src: string) {
options.rootDir = rootDir;
options.baseUrl = rootDir;
options.sourceRoot = util.toFileUri(rootDir);
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 0 : 1;
return options;
}

function createCompile(src: string, build: boolean, emitError?: boolean): (token?: util.ICancellationToken) => NodeJS.ReadWriteStream {
const opts = _.clone(getTypeScriptCompilerOptions(src));
opts.inlineSources = !!build;
opts.noFilesystemLookup = true;
function createCompile(src: string, build: boolean, emitError?: boolean) {
const projectPath = path.join(__dirname, '../../', src, 'tsconfig.json');
const overrideOptions = { ...getTypeScriptCompilerOptions(src), inlineSources: Boolean(build) };

const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
const compilation = tsb.create(projectPath, overrideOptions, false, err => reporter(err));

return function (token?: util.ICancellationToken) {
function pipeline(token?: util.ICancellationToken) {

const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
@@ -61,43 +54,35 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
const input = es.through();
const output = input
.pipe(utf8Filter)
.pipe(bom())
.pipe(bom()) // this is required to preserve BOM in test files that loose it otherwise
.pipe(utf8Filter.restore)
.pipe(tsFilter)
.pipe(util.loadSourcemaps())
.pipe(ts(token))
.pipe(compilation(token))
.pipe(noDeclarationsFilter)
.pipe(build ? nls() : es.through())
.pipe(noDeclarationsFilter.restore)
.pipe(sourcemaps.write('.', {
addComment: false,
includeContent: !!build,
sourceRoot: opts.sourceRoot
sourceRoot: overrideOptions.sourceRoot
}))
.pipe(tsFilter.restore)
.pipe(reporter.end(!!emitError));

return es.duplex(input, output);
}
pipeline.tsProjectSrc = () => {
return compilation.src({ base: src });
};
return pipeline;
}

const typesDts = [
'node_modules/typescript/lib/*.d.ts',
'node_modules/@types/**/*.d.ts',
'!node_modules/@types/webpack/**/*',
'!node_modules/@types/uglify-js/**/*',
];

export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {

return function () {
const compile = createCompile(src, build, true);

const srcPipe = es.merge(
gulp.src(`${src}/**`, { base: `${src}` }),
gulp.src(typesDts),
);

const srcPipe = gulp.src(`${src}/**`, { base: `${src}` });
let generator = new MonacoGenerator(false);
if (src === 'src') {
generator.execute();
@@ -115,11 +100,8 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
return function () {
const compile = createCompile('src', build);

const src = es.merge(
gulp.src('src/**', { base: 'src' }),
gulp.src(typesDts),
);
const watchSrc = watch('src/**', { base: 'src' });
const src = gulp.src('src/**', { base: 'src' });
const watchSrc = watch('src/**', { base: 'src', readDelay: 200 });

let generator = new MonacoGenerator(true);
generator.execute();
@@ -2,29 +2,88 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

const fs = require('fs');
const path = require('path');
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const vfs = require("vinyl-fs");
const filter = require("gulp-filter");
const json = require("gulp-json-editor");
const _ = require("underscore");
const util = require("./util");
const electron = require('gulp-atom-electron');
const root = path.dirname(path.dirname(__dirname));

const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);
function getElectronVersion() {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
// @ts-ignore
const target = /^target "(.*)"$/m.exec(yarnrc)[1];

return target;
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)[1];
return target;
}
exports.getElectronVersion = getElectronVersion;
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));
function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}
exports.config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};
function getElectron(arch) {
return () => {
const electronOpts = _.extend({}, exports.config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});
return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}
async function main(arch = process.arch) {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;
if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}

module.exports.getElectronVersion = getElectronVersion;

// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
const version = getElectronVersion();
const versionFile = path.join(root, '.build', 'electron', 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;

process.exit(isUpToDate ? 0 : 1);
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}
100 build/lib/electron.ts Normal file
@@ -0,0 +1,100 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

'use strict';

import * as fs from 'fs';
import * as path from 'path';
import * as vfs from 'vinyl-fs';
import * as filter from 'gulp-filter';
import * as json from 'gulp-json-editor';
import * as _ from 'underscore';
import * as util from './util';

const electron = require('gulp-atom-electron');

const root = path.dirname(path.dirname(__dirname));
const product = JSON.parse(fs.readFileSync(path.join(root, 'product.json'), 'utf8'));
const commit = util.getVersion(root);

export function getElectronVersion(): string {
const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
const target = /^target "(.*)"$/m.exec(yarnrc)![1];
return target;
}

const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));

function darwinBundleDocumentType(extensions: string[], icon: string) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}

export const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,
repo: product.electronRepository || undefined
};

function getElectron(arch: string): () => NodeJS.ReadWriteStream {
return () => {
const electronOpts = _.extend({}, config, {
platform: process.platform,
arch,
ffmpegChromium: true,
keepDefaultApp: true
});

return vfs.src('package.json')
.pipe(json({ name: product.nameShort }))
.pipe(electron(electronOpts))
.pipe(filter(['**', '!**/app/package.json']))
.pipe(vfs.dest('.build/electron'));
};
}

async function main(arch = process.arch): Promise<void> {
const version = getElectronVersion();
const electronPath = path.join(root, '.build', 'electron');
const versionFile = path.join(electronPath, 'version');
const isUpToDate = fs.existsSync(versionFile) && fs.readFileSync(versionFile, 'utf8') === `${version}`;

if (!isUpToDate) {
await util.rimraf(electronPath)();
await util.streamToPromise(getElectron(arch)());
}
}

if (require.main === module) {
main(process.argv[2]).catch(err => {
console.error(err);
process.exit(1);
});
}
59 build/lib/eslint/code-import-patterns.js Normal file
@@ -0,0 +1,59 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const path_1 = require("path");
const minimatch = require("minimatch");
const utils_1 = require("./utils");
module.exports = new class {
constructor() {
this.meta = {
messages: {
badImport: 'Imports violates \'{{restrictions}}\' restrictions. See https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
},
docs: {
url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
}
};
}
create(context) {
const configs = context.options;
for (const config of configs) {
if (minimatch(context.getFilename(), config.target)) {
return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
}
}
return {};
}
_checkImport(context, config, node, path) {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
}
let restrictions;
if (typeof config.restrictions === 'string') {
restrictions = [config.restrictions];
}
else {
restrictions = config.restrictions;
}
let matched = false;
for (const pattern of restrictions) {
if (minimatch(path, pattern)) {
matched = true;
break;
}
}
if (!matched) {
// None of the restrictions matched
context.report({
loc: node.loc,
messageId: 'badImport',
data: {
restrictions: restrictions.join(' or ')
}
});
}
}
};
75 build/lib/eslint/code-import-patterns.ts Normal file
@@ -0,0 +1,75 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as eslint from 'eslint';
import { TSESTree } from '@typescript-eslint/experimental-utils';
import { join } from 'path';
import * as minimatch from 'minimatch';
import { createImportRuleListener } from './utils';

interface ImportPatternsConfig {
target: string;
restrictions: string | string[];
}

export = new class implements eslint.Rule.RuleModule {

readonly meta: eslint.Rule.RuleMetaData = {
messages: {
badImport: 'Imports violates \'{{restrictions}}\' restrictions. See https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
},
docs: {
url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
}
};

create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {

const configs = <ImportPatternsConfig[]>context.options;

for (const config of configs) {
if (minimatch(context.getFilename(), config.target)) {
return createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
}
}

return {};
}

private _checkImport(context: eslint.Rule.RuleContext, config: ImportPatternsConfig, node: TSESTree.Node, path: string) {

// resolve relative paths
if (path[0] === '.') {
path = join(context.getFilename(), path);
}

let restrictions: string[];
if (typeof config.restrictions === 'string') {
restrictions = [config.restrictions];
} else {
restrictions = config.restrictions;
}

let matched = false;
for (const pattern of restrictions) {
if (minimatch(path, pattern)) {
matched = true;
break;
}
}

if (!matched) {
// None of the restrictions matched
context.report({
loc: node.loc,
messageId: 'badImport',
data: {
restrictions: restrictions.join(' or ')
}
});
}
}
};
68 build/lib/eslint/code-layering.js Normal file
@@ -0,0 +1,68 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const path_1 = require("path");
const utils_1 = require("./utils");
module.exports = new class {
constructor() {
this.meta = {
messages: {
layerbreaker: 'Bad layering. You are not allowed to access {{from}} from here, allowed layers are: [{{allowed}}]'
},
docs: {
url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
}
};
}
create(context) {
const fileDirname = path_1.dirname(context.getFilename());
const parts = fileDirname.split(/\\|\//);
const ruleArgs = context.options[0];
let config;
for (let i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set()
};
Object.keys(ruleArgs).forEach(key => {
if (!config.allowed.has(key)) {
config.disallowed.add(key);
}
});
break;
}
}
if (!config) {
// nothing
return {};
}
return utils_1.createImportRuleListener((node, path) => {
if (path[0] === '.') {
path = path_1.join(path_1.dirname(context.getFilename()), path);
}
const parts = path_1.dirname(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i];
if (config.allowed.has(part)) {
// GOOD - same layer
break;
}
if (config.disallowed.has(part)) {
// BAD - wrong layer
context.report({
loc: node.loc,
messageId: 'layerbreaker',
data: {
from: part,
allowed: [...config.allowed.keys()].join(', ')
}
});
break;
}
}
});
}
};
83 build/lib/eslint/code-layering.ts Normal file
@@ -0,0 +1,83 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/

import * as eslint from 'eslint';
import { join, dirname } from 'path';
import { createImportRuleListener } from './utils';

type Config = {
allowed: Set<string>;
disallowed: Set<string>;
};

export = new class implements eslint.Rule.RuleModule {

readonly meta: eslint.Rule.RuleMetaData = {
messages: {
layerbreaker: 'Bad layering. You are not allowed to access {{from}} from here, allowed layers are: [{{allowed}}]'
},
docs: {
url: 'https://github.com/microsoft/vscode/wiki/Source-Code-Organization'
}
};

create(context: eslint.Rule.RuleContext): eslint.Rule.RuleListener {

const fileDirname = dirname(context.getFilename());
const parts = fileDirname.split(/\\|\//);
const ruleArgs = <Record<string, string[]>>context.options[0];

let config: Config | undefined;
for (let i = parts.length - 1; i >= 0; i--) {
if (ruleArgs[parts[i]]) {
config = {
allowed: new Set(ruleArgs[parts[i]]).add(parts[i]),
disallowed: new Set()
};
Object.keys(ruleArgs).forEach(key => {
if (!config!.allowed.has(key)) {
config!.disallowed.add(key);
}
});
break;
}
}

if (!config) {
// nothing
return {};
}

return createImportRuleListener((node, path) => {
if (path[0] === '.') {
path = join(dirname(context.getFilename()), path);
}

const parts = dirname(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i];

if (config!.allowed.has(part)) {
// GOOD - same layer
break;
}

if (config!.disallowed.has(part)) {
// BAD - wrong layer
context.report({
loc: node.loc,
messageId: 'layerbreaker',
data: {
from: part,
allowed: [...config!.allowed.keys()].join(', ')
}
});
break;
}
}
});
}
};
38 build/lib/eslint/code-no-nls-in-standalone-editor.js Normal file
@@ -0,0 +1,38 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const path_1 = require("path");
const utils_1 = require("./utils");
module.exports = new class NoNlsInStandaloneEditorRule {
constructor() {
this.meta = {
messages: {
noNls: 'Not allowed to import vs/nls in standalone editor modules. Use standaloneStrings.ts'
}
};
}
create(context) {
const fileName = context.getFilename();
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
return utils_1.createImportRuleListener((node, path) => {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
}
if (/vs(\/|\\)nls/.test(path)) {
context.report({
loc: node.loc,
messageId: 'noNls'
});
}
});
}
return {};
}
};
Some files were not shown because too many files have changed in this diff.