Mirror of https://github.com/ckaczor/azuredatastudio.git
Synced 2026-02-17 11:01:37 -05:00

Compare commits: 1146 commits

.editorconfig

@@ -1,4 +1,4 @@
# EditorConfig is awesome: http://EditorConfig.org
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

@@ -6,7 +6,6 @@ root = true
# Tab indentation
[*]
indent_style = tab
indent_size = 4
trim_trailing_whitespace = true

# The indent size used in the `package.json` file cannot be changed

19 .eslintrc

@@ -1,19 +0,0 @@
{
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

20 .eslintrc.json (Normal file)

@@ -0,0 +1,20 @@
{
"root": true,
"env": {
"node": true,
"es6": true
},
"rules": {
"no-console": 0,
"no-cond-assign": 0,
"no-unused-vars": 1,
"no-extra-semi": "warn",
"semi": "warn"
},
"extends": "eslint:recommended",
"parserOptions": {
"ecmaFeatures": {
"experimentalObjectRestSpread": true
}
}
}

1 .gitattributes (vendored)

@@ -7,3 +7,4 @@ ThirdPartyNotices.txt eol=crlf
*.cmd eol=crlf
*.ps1 eol=lf
*.sh eol=lf
*.rtf -text

4 .github/ISSUE_TEMPLATE/bug_report.md (vendored)

@@ -1,6 +1,10 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: Bug
assignees: ''

---

<!-- Please search existing issues to avoid creating duplicates. -->

20 .github/ISSUE_TEMPLATE/feature_request.md (vendored, Normal file)

@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: Enhancement
assignees: ''

---

**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]

**Describe the solution or feature you'd like**
A clear and concise description of what you want to happen.

**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.

**Additional context**
Add any other context or screenshots about the feature request here.

49 .github/classifier.yml (vendored, Normal file)

@@ -0,0 +1,49 @@
{
perform: false,
alwaysRequireAssignee: false,
labelsRequiringAssignee: [],
autoAssignees: {
accessibility: [],
acquisition: [],
agent: [],
azure: [],
backup: [],
bcdr: [],
'chart viewer': [],
connection: [],
dacfx: [],
dashboard: [],
'data explorer': [],
documentation: [],
'edit data': [],
export: [],
extensibility: [],
extensionManager: [],
globalization: [],
grid: [],
import: [],
insights: [],
intellisense: [],
localization: [],
'managed instance': [],
notebooks: [],
'object explorer': [],
performance: [],
profiler: [],
'query editor': [],
'query execution': [],
reliability: [],
restore: [],
scripting: [],
'server group': [],
settings: [],
setup: [],
shell: [],
showplan: [],
snippet: [],
sql2019Preview: [],
sqldw: [],
supportability: [],
ux: []
}
}

12 .github/commands.yml (vendored, Normal file)

@@ -0,0 +1,12 @@
{
perform: false,
commands: [
{
type: 'label',
name: 'duplicate',
allowTriggerByBot: true,
action: 'close',
comment: "Thanks for creating this issue! We figured it's covering the same as another one we already have. Thus, we closed this one as a duplicate. You can search for existing issues [here](https://aka.ms/vscodeissuesearch). See also our [issue reporting](https://aka.ms/vscodeissuereporting) guidelines.\n\nHappy Coding!"
}
]
}

5 .github/copycat.yml (vendored, Normal file)

@@ -0,0 +1,5 @@
{
perform: true,
target_owner: 'anthonydresser',
target_repo: 'testissues'
}

6 .github/locker.yml (vendored, Normal file)

@@ -0,0 +1,6 @@
{
daysAfterClose: 45,
daysSinceLastUpdate: 3,
ignoredLabels: [],
perform: true
}

6 .github/needs_more_info.yml (vendored, Normal file)

@@ -0,0 +1,6 @@
{
daysUntilClose: 7,
needsMoreInfoLabel: 'needs more info',
perform: true,
closeComment: "This issue has been closed automatically because it needs more information and has not had recent activity in the last 7 days. If you have more info to help resolve the issue, leave a comment"
}

6 .github/new_release.yml (vendored, Normal file)

@@ -0,0 +1,6 @@
{
newReleaseLabel: 'new-release',
newReleaseColor: '006b75',
daysAfterRelease: 5,
perform: true
}

5 .github/similarity.yml (vendored, Normal file)

@@ -0,0 +1,5 @@
{
perform: true,
whenCreatedByTeam: true,
comment: "Thanks for submitting this issue. Please also check if it is already covered by an existing one, like:\n${potentialDuplicates}"
}

7 .gitignore (vendored)

@@ -1,8 +1,10 @@
.DS_Store
.cache
npm-debug.log
Thumbs.db
node_modules/
.build/
extensions/**/dist/
out/
out-build/
out-editor/
@@ -13,7 +15,10 @@ out-editor-min/
out-monaco-editor-core/
out-vscode/
out-vscode-min/
build/node_modules
out-vscode-reh/
out-vscode-reh-min/
out-vscode-reh-pkg/
**/node_modules
coverage/
test_data/
test-results/

58 .travis.yml

@@ -1,58 +0,0 @@
sudo: false
language: cpp

os:
- linux
- osx

cache:
directories:
- $HOME/.cache/yarn

notifications:
email: false
webhooks:
- http://vscode-probot.westus.cloudapp.azure.com:3450/travis/notifications
- http://vscode-test-probot.westus.cloudapp.azure.com:3450/travis/notifications

addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.9
- g++-4.9
- gcc-4.9-multilib
- g++-4.9-multilib
- zip
- libgtk2.0-0
- libx11-dev
- libxkbfile-dev
- libsecret-1-dev

before_install:
- git submodule update --init --recursive
- nvm install 8.9.1
- nvm use 8.9.1
- npm i -g yarn
# - npm config set python `which python`
- if [ $TRAVIS_OS_NAME == "linux" ]; then
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0;
sh -e /etc/init.d/xvfb start;
sleep 3;
fi
# Make npm logs less verbose
# - npm config set depth 0
# - npm config set loglevel warn

install:
- yarn

script:
- node_modules/.bin/gulp electron --silent
- node_modules/.bin/gulp compile --silent --max_old_space_size=4096
- node_modules/.bin/gulp optimize-vscode --silent --max_old_space_size=4096
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then ./scripts/test.sh --coverage --reporter dot; else ./scripts/test.sh --reporter dot; fi

after_success:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then node_modules/.bin/coveralls < .build/coverage/lcov.info; fi

23 .vscode/cglicenses.schema.json (vendored, Normal file)

@@ -0,0 +1,23 @@
{
"type": "array",
"items": {
"type": "object",
"required": [
"name",
"licenseDetail"
],
"properties": {
"name": {
"type": "string",
"description": "The name of the dependency"
},
"licenseDetail": {
"type": "array",
"description": "The complete license text of the dependency",
"items": {
"type": "string"
}
}
}
}
}

142 .vscode/cgmanifest.schema.json (vendored, Normal file)

@@ -0,0 +1,142 @@
{
"type": "object",
"properties": {
"registrations": {
"type": "array",
"items": {
"type": "object",
"properties": {
"component": {
"oneOf": [
{
"type": "object",
"required": [
"type",
"git"
],
"properties": {
"type": {
"type": "string",
"enum": [
"git"
]
},
"git": {
"type": "object",
"required": [
"name",
"repositoryUrl",
"commitHash"
],
"properties": {
"name": {
"type": "string"
},
"repositoryUrl": {
"type": "string"
},
"commitHash": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"npm"
],
"properties": {
"type": {
"type": "string",
"enum": [
"npm"
]
},
"npm": {
"type": "object",
"required": [
"name",
"version"
],
"properties": {
"name": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
},
{
"type": "object",
"required": [
"type",
"other"
],
"properties": {
"type": {
"type": "string",
"enum": [
"other"
]
},
"other": {
"type": "object",
"required": [
"name",
"downloadUrl",
"version"
],
"properties": {
"name": {
"type": "string"
},
"downloadUrl": {
"type": "string"
},
"version": {
"type": "string"
}
}
}
}
}
]
},
"repositoryUrl": {
"type": "string",
"description": "The git url of the component"
},
"version": {
"type": "string",
"description": "The version of the component"
},
"license": {
"type": "string",
"description": "The name of the license"
},
"developmentDependency": {
"type": "boolean",
"description": "This component is inlined in the vscode repo and **is not shipped**."
},
"isOnlyProductionDependency": {
"type": "boolean",
"description": "This component is shipped and **is not inlined in the vscode repo**."
},
"licenseDetail": {
"type": "array",
"items": {
"type": "string"
},
"description": "The license text"
}
}
}
}
}
}

2 .vscode/extensions.json (vendored)

@@ -2,7 +2,7 @@
// See https://go.microsoft.com/fwlink/?LinkId=827846
// for the documentation about the extensions.json format
"recommendations": [
"eg2.tslint",
"ms-vscode.vscode-typescript-tslint-plugin",
"dbaeumer.vscode-eslint",
"msjsdiag.debugger-for-chrome"
]

147 .vscode/launch.json (vendored)

@@ -9,14 +9,12 @@
"stopOnEntry": true,
"args": [
"hygiene"
],
"cwd": "${workspaceFolder}"
]
},
{
"type": "node",
"request": "attach",
"name": "Attach to Extension Host",
"protocol": "inspector",
"port": 5870,
"restart": true,
"outFiles": [
@@ -24,19 +22,15 @@
]
},
{
"type": "node",
"type": "chrome",
"request": "attach",
"name": "Attach to Shared Process",
"protocol": "inspector",
"port": 5871,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
"port": 9222,
"urlFilter": "*"
},
{
"type": "node",
"request": "attach",
"protocol": "inspector",
"name": "Attach to Search Process",
"port": 5876,
"outFiles": [
@@ -47,7 +41,6 @@
"type": "node",
"request": "attach",
"name": "Attach to CLI Process",
"protocol": "inspector",
"port": 5874,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -57,7 +50,6 @@
"type": "node",
"request": "attach",
"name": "Attach to Main Process",
"protocol": "inspector",
"port": 5875,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
@@ -73,6 +65,45 @@
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat",
"timeout": 20000
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 20000
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"timeout": 20000
},
"env": {
"VSCODE_EXTHOST_WILL_SEND_SOCKET": null
},
"breakOnLoad": false,
"urlFilter": "*workbench.html*",
"runtimeArgs": [
"--inspect=5875",
"--no-cached-data"
],
"webRoot": "${workspaceFolder}"
},
{
"type": "node",
"request": "launch",
"name": "Launch ADS (Main Process)",
"runtimeExecutable": "${workspaceFolder}/scripts/sql.sh",
"runtimeArgs": [
"--no-cached-data"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "chrome",
"request": "launch",
"name": "Launch azuredatastudio with new notebook command",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/sql.bat"
},
@@ -84,7 +115,8 @@
},
"urlFilter": "*index.html*",
"runtimeArgs": [
"--inspect=5875"
"--inspect=5875",
"--command=notebook.command.new"
],
"skipFiles": [
"**/winjs*.js"
@@ -92,34 +124,6 @@
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
{
"type": "node",
"request": "launch",
"name": "Unit Tests",
"protocol": "inspector",
"program": "${workspaceFolder}/node_modules/mocha/bin/_mocha",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
},
"stopOnEntry": false,
"outputCapture": "std",
"args": [
"--delay",
"--timeout",
"2000"
],
"cwd": "${workspaceFolder}",
"env": {
"ELECTRON_RUN_AS_NODE": "true"
},
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"name": "Launch Built-in Extension",
"type": "extensionHost",
@@ -128,9 +132,70 @@
"args": [
"--extensionDevelopmentPath=${workspaceRoot}/extensions/debug-auto-launch"
]
},
{
"type": "node",
"request": "launch",
"name": "Launch Smoke Test",
"program": "${workspaceFolder}/test/smoke/test/index.js",
"cwd": "${workspaceFolder}/test/smoke",
"env": {
"BUILD_ARTIFACTSTAGINGDIRECTORY": "${workspaceFolder}"
}
},
{
"type": "node",
"request": "launch",
"name": "Run Unit Tests",
"program": "${workspaceFolder}/test/electron/index.js",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
},
"outputCapture": "std",
"args": [
"--remote-debugging-port=9222"
],
"cwd": "${workspaceFolder}",
"outFiles": [
"${workspaceFolder}/out/**/*.js"
]
},
{
"type": "chrome",
"request": "launch",
"name": "Run Extension Unit Tests",
"windows": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.bat"
},
"osx": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/scripts/test-extensions-unit.sh"
},
"webRoot": "${workspaceFolder}",
"timeout": 45000
},
],
"compounds": [
{
"name": "Debug Unit Tests",
"configurations": [
"Attach to azuredatastudio",
"Run Unit Tests"
]
},
{
"name": "Debug Extension Unit Tests",
"configurations": [
"Attach to Extension Host",
"Run Extension Unit Tests"
]
},
{
"name": "Debug azuredatastudio Main and Renderer",
"configurations": [

24 .vscode/settings.json (vendored)

@@ -11,7 +11,7 @@
}
},
"files.associations": {
"OSSREADME.json": "jsonc"
"cglicenses.json": "jsonc"
},
"search.exclude": {
"**/node_modules": true,
@@ -22,9 +22,9 @@
"out-vscode/**": true,
"i18n/**": true,
"extensions/**/out/**": true,
"test/smoke/out/**": true
"test/smoke/out/**": true,
"src/vs/base/test/node/uri.test.data.txt": true
},
"tslint.enable": true,
"lcov.path": [
"./.build/coverage/lcov.info",
"./.build/coverage-single/lcov.info"
@@ -43,6 +43,20 @@
"git.ignoreLimitWarning": true,
"emmet.excludeLanguages": [],
"typescript.preferences.importModuleSpecifier": "non-relative",
"typescript.preferences.quoteStyle": "single"

"typescript.preferences.quoteStyle": "single",
"json.schemas": [
{
"fileMatch": [
"cgmanifest.json"
],
"url": "./.vscode/cgmanifest.schema.json"
},
{
"fileMatch": [
"cglicenses.json"
],
"url": "./.vscode/cglicenses.schema.json"
}
],
"git.ignoreLimitWarning": true
}

40 .vscode/shared.code-snippets (vendored, Normal file)

@@ -0,0 +1,40 @@
{
// Each snippet is defined under a snippet name and has a scope, prefix, body and
// description. The scope defines in watch languages the snippet is applicable. The prefix is what is
// used to trigger the snippet and the body will be expanded and inserted.Possible variables are:
// $1, $2 for tab stops, $0 for the final cursor position, and ${1:label}, ${2:another} for placeholders.
// Placeholders with the same ids are connected.
// Example:
"MSFT Copyright Header": {
"scope": "javascript,typescript,css",
"prefix": [
"header",
"stub",
"copyright"
],
"body": [
"/*---------------------------------------------------------------------------------------------",
" * Copyright (c) Microsoft Corporation. All rights reserved.",
" * Licensed under the Source EULA. See License.txt in the project root for license information.",
" *--------------------------------------------------------------------------------------------*/",
"",
"$0"
],
"description": "Insert Copyright Statement"
},
"TS -> Inject Service": {
"scope": "typescript",
"description": "Constructor Injection Pattern",
"prefix": "@inject",
"body": "@$1 private readonly _$2: ${1},$0"
},
"TS -> Event & Emitter": {
"scope": "typescript",
"prefix": "emitter",
"description": "Add emitter and event properties",
"body": [
"private readonly _onDid$1 = new Emitter<$2>();",
"readonly onDid$1: Event<$2> = this._onDid$1.event;"
],
}
}

14 .vscode/tasks.json (vendored)

@@ -28,6 +28,20 @@
}
}
},
{
"type": "npm",
"script": "strict-initialization-watch",
"label": "TS - Strict Initialization",
"isBackground": true,
"presentation": {
"reveal": "never"
},
"problemMatcher": {
"base": "$tsc-watch",
"owner": "typescript-strict-initialization",
"applyTo": "allDocuments"
}
},
{
"type": "gulp",
"task": "tslint",

2 .yarnrc

@@ -1,3 +1,3 @@
disturl "https://atom.io/download/electron"
target "2.0.9"
target "3.1.8"
runtime "electron"

132 CHANGELOG.md

@@ -1,5 +1,137 @@
# Change Log

## Version 1.8.0
* Release date: June 6, 2019
* Release status: General Availability

## What's new in this version
* Initial release of the Database Admin Tool Extensions for Windows *Preview* extension
* Initial release of the Central Management Servers extension
* **Schema Compare**
* Added Exclude/Include Options
* Generate Script opens script after being generated
* Removed double scroll bars
* Formatting and layout improvements
* Complete changes can be found [here](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22June+2019+Release%22+label%3A%22Area%3A+Schema+Compare%22+is%3Aclosed)
* Messages panel moved into results panel - when users ran SQL queries, results and messages were in stacked panels. Now they are in separate tabs in a single panel similar to SSMS.
* **Notebook**
* Users can now choose to use their own Python 3 or Anaconda installs in notebooks
* Multiple Stability + fit/finish fixes
* View the full list of improvements and fixes [here](https://github.com/microsoft/azuredatastudio/issues?q=is%3Aissue+milestone%3A%22June+2019+Release%22+is%3Aclosed+label%3A%22Area%3A+Notebooks%22)
* Visual Studio Code May Release Merge 1.34 - the latest improvements can be found [here](https://code.visualstudio.com/updates/v1_34)
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/32?closed=1).

## Version 1.7.0
* Release date: May 8, 2019
* Release status: General Availability

## What's new in this version
* Announcing Schema Compare *Preview* extension
* Tasks Panel UX improvement
* Announcing new Welcome page
* Resolved [bugs and issues](https://github.com/microsoft/azuredatastudio/milestone/31?closed=1).

## Contributions and "thank you"
We would like to thank all our users who raised issues.

## Version 1.6.0
* Release date: April 18, 2019
* Release status: General Availability

## What's new in this version
* Align with latest VS Code editor platform (currently 1.33.1)
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/26?closed=1).

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* yamatoya for `fix the format (#4899)`

## Version 1.5.1
* Release date: March 18, 2019
* Release status: General Availability

## What's new in this version
* Announcing T-SQL Notebooks
* Announcing PostgreSQL extension
* Announcing SQL Server Dacpac extension
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/25?closed=1).

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* GeoffYoung for `Fix sqlDropColumn description #4422`

## Version 1.4.5
* Release date: February 13, 2019
* Release status: General Availability

## What's new in this version
* Added **Admin pack for SQL Server** extension pack to make it easier to install SQL Server admin-related extensions. This includes:
* [SQL Server Agent](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-agent-extension?view=sql-server-2017)
* [SQL Server Profiler](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-profiler-extension?view=sql-server-2017)
* [SQL Server Import](https://docs.microsoft.com/en-us/sql/azure-data-studio/sql-server-import-extension?view=sql-server-2017)
* Added filtering extended event support in Profiler extension
* Added Save as XML feature that can save T-SQL results as XML
* Added Data-Tier Application Wizard improvements
* Added Generate script button
* Added view to give warnings of possible data loss during deployment
* Updates to the [SQL Server 2019 Preview extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* Results streaming enabled by default for long running queries
* Resolved [bugs and issues](https://github.com/Microsoft/azuredatastudio/milestone/23?closed=1).

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
* sadedil for `Missing feature request: Save as XML #3729`
* gbritton1 for `Removed reference to object explorer #3463`

## Version 1.3.8
* Release date: January 9, 2019
* Release status: General Availability

## What's new in this version
* #13 Feature Request: Azure Active Directory Authentication
* #1040 Stream initial query results as they become available
* #3298 Сan't add an azure account.
* #2387 Support Per-User Installer
* SQL Server Import updates for DACPAC\BACPAC
* SQL Server Profiler UI and UX improvements
* Updates to [SQL Server 2019 extension](https://docs.microsoft.com/sql/azure-data-studio/sql-server-2019-extension?view=sql-server-ver15)
* **sp_executesql to SQL** and **New Database** extensions

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
* oltruong for `typo fix #3025'`
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
* Thomas-S-B for `Simplified code #750`

## Version 1.2.4
* Release date: November 6, 2018
* Release status: General Availability

## What's new in this version
* Update to the SQL Server 2019 Preview extension
* Introducing Paste the Plan extension
* Introducing High Color queries extension, including SSMS editor theme
* Fixes in SQL Server Agent, Profiler, and Import extensions
* Fix .Net Core Socket KeepAlive issue causing dropped inactive connections on macOS
* Upgrade SQL Tools Service to .Net Core 2.2 Preview 3 (for eventual AAD support)
* Fix customer reported GitHub issues

## Contributions and "thank you"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`

## Version 1.1.3
* Release date: October 18, 2018
* Release status: General Availability

CONTRIBUTING.md

@@ -18,11 +18,15 @@ File a single issue per problem and feature request.
* Do not enumerate multiple bugs or feature requests in the same issue.
* Do not add your issue as a comment to an existing issue unless it's for the identical input. Many issues look similar, but have different causes.

The more information you can provide, the more likely someone will be successful reproducing the issue and finding a fix.
The more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix.

The built-in tool for reporting an issue, which you can access by using `Report Issue` in Azure Data Studio's Help menu, can help streamline this process by automatically providing the version of Azure Data Studio, all your installed extensions, and your system info.

Please include the following with each issue.

* Version of Azure Data Studio (formerly SQL Operations Studio).
* Version of Azure Data Studio (formerly SQL Operations Studio)

* Your operating system

> **Tip:** You can easily create an issue using `Report Issues` from Azure Data Studio Help menu.

1751 OSSREADME.json

File diff suppressed because it is too large

58
README.md
58
README.md
@@ -1,27 +1,34 @@
|
||||
# Azure Data Studio
|
||||
|
||||
[](https://gitter.im/Microsoft/sqlopsstudio?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
|
||||
[](https://dev.azure.com/ms/azuredatastudio/_build/latest?definitionId=4)
|
||||
|
||||
Azure Data Studio is a data management tool that enables you to work with SQL Server, Azure SQL DB and SQL DW from Windows, macOS and Linux.
|
||||
|
||||
**Download the latest Azure Data Studio release**
|
||||
## **Download the latest Azure Data Studio release**
|
||||
|
||||
Platform | Link
|
||||
-- | --
|
||||
Windows Setup Installer | https://go.microsoft.com/fwlink/?linkid=2030731
|
||||
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2030736
|
||||
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2030738
|
||||
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2030741
|
||||
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2030746
|
||||
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2030750
|
||||
Windows User Installer | https://go.microsoft.com/fwlink/?linkid=2094100
|
||||
Windows System Installer | https://go.microsoft.com/fwlink/?linkid=2094200
|
||||
Windows ZIP | https://go.microsoft.com/fwlink/?linkid=2094201
|
||||
macOS ZIP | https://go.microsoft.com/fwlink/?linkid=2094202
|
||||
Linux TAR.GZ | https://go.microsoft.com/fwlink/?linkid=2094101
|
||||
Linux RPM | https://go.microsoft.com/fwlink/?linkid=2094102
|
||||
Linux DEB | https://go.microsoft.com/fwlink/?linkid=2094203
|
||||
|
||||
Go to our [download page](https://aka.ms/azuredatastudio) for more specific instructions.
|
||||
|
||||
## Try out the latest insiders build from `master`:
- [Windows User Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-user/insider)
- [Windows System Installer - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64/insider)
- [Windows ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/win32-x64-archive/insider)
- [macOS ZIP - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/darwin/insider)
- [Linux TAR.GZ - **Insiders build**](https://azuredatastudio-update.azurewebsites.net/latest/linux-x64/insider)

See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CHANGELOG.md) for additional details of what's in this release.

## **Feature Highlights**

- Cross-Platform DB management for Windows, macOS and Linux with simple XCopy deployment
- SQL Server Connection Management with Connection Dialog, Server Groups, Azure Integration and Registered Servers
@@ -34,9 +41,9 @@ See the [change log](https://github.com/Microsoft/azuredatastudio/blob/master/CH
- Task History window to view current task execution status, completion results with error messages and task T-SQL scripting
- Scripting support to generate CREATE, SELECT, ALTER and DROP statements for database objects
- Workspaces with full Git integration and Find In Files support for managing T-SQL script libraries
- Modern light-weight shell with theming, user settings, full-screen support, integrated terminal and numerous other features

Here are some of these features in action.

<img src='https://github.com/Microsoft/azuredatastudio/blob/master/docs/overview_screen.jpg' width='800px'>

@@ -61,6 +68,22 @@ The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.micro
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:

* Stevoni for `Corrected Keyboard Shortcut Execution Issue #5480`
* yamatoya for `fix the format #4899`
* GeoffYoung for `Fix sqlDropColumn description #4422`
* AlexFsmn for `Added context menu for DBs in explorer view to backup & restore db. #2277`
* sadedil for `Missing feature request: Save as XML #3729`
* gbritton1 for `Removed reference to object explorer #3463`
* Tarig0 for `Add Routine_Type to CreateStoredProc fixes #3257 (#3286)`
* oltruong for `typo fix #3025`
* Thomas-S-B for `Removed unnecessary IErrorDetectionStrategy #749`
* Thomas-S-B for `Simplified code #750`
* rdaniels6813 for `Add query plan theme support #3031`
* Ruturaj123 for `Fixed some typos and grammatical errors #3027`
* PromoFaux for `Use emoji shortcodes in CONTRIBUTING.md instead of <20> #3009`
* ckaczor for `Fix: DATETIMEOFFSET data types should be ISO formatted #714`
* hi-im-T0dd for `Fixed sync issue with my forked master so this commit is correct #2948`
* hi-im-T0dd for `Fixed when right clicking and selecting Manage-correct name displays #2794`
* philoushka for `center the icon #2760`
* anthonypants for `Typo #2775`
* kstolte for `Fix Invalid Configuration in Launch.json #2789`
@@ -69,25 +92,25 @@ We would like to thank all our users who raised issues, and in particular the fo
* AlexFsmn `Disabled connection name input when connecting to a server. #2566`
* SebastianPfliegel `Added more saveAsCsv options #2099`
* ianychoi `Fixes a typo: Mimunum -> Minimum #1994`
* AlexFsmn `Fixed bug where proper file extension wasn't appended to the filename. #2151`
* AlexFsmn `Added functionality for adding any file to import wizard #2329`
* AlexFsmn `Fixed background issue when copying a chart to clipboard #2215`
* AlexFsmn `Fixed problem where vertical charts didn't display labels correctly. #2263`
* AlexFsmn `Fixed Initial values for charts to match visuals #2266`
* AlexFsmn `Renamed chart option labels #2264`
* AlexFsmn `Added feature for opening the file after exporting to CSV/XLS/JSON & query files #2216`
* AlexFsmn `Get Connection String should copy to clipboard #2175`
* lanceklinger `Fix for double-clicking column handle in results table #1504`
* westerncj for `Removed duplicate contribution from README.md (#753)`
* ntovas for `Fix for duplicate extensions shown in "Save File" dialog. (#779)`
* SebastianPfliegel for `Add cursor snippet (#475)`
* mikaoelitiana for the fix: `revert README and CONTRIBUTING after last VSCode merge (#574)`
* alextercete for `Reinstate menu item to install from VSIX (#682)`
* alextercete for `Fix "No extension gallery service configured" error (#427)`
* mwiedemeyer for `Fix #58: Default sort order for DB size widget (#111)`
* AlexTroshkin for `Show disconnect in context menu only when connectionProfile connected (#150)`
* AlexTroshkin for `Fix #138: Invalid syntax color highlighting (identity not highlighting) (#140)`
* stebet for `Fix #153: Fixing sql snippets that failed on a DB with a case-sensitive collation. (#152)`
* SebastianPfliegel `Remove sqlExtensionHelp (#312)`
* olljanat for `Implemented npm version check (#314)`
* Adam Machanic for helping with the `whoisactive` extension
@@ -103,8 +126,7 @@ We would like to thank all our users who raised issues, and in particular the fo
* Russian: Andrey Veselov, Anton Fontanov, Anton Savin, Elena Ostrovskaia, Igor Babichev, Maxim Zelensky, Rodion Fedechkin, Tasha T, Vladimir Zyryanov
* Portuguese Brazil: Daniel de Sousa, Diogo Duarte, Douglas Correa, Douglas Eccker, José Emanuel Mendes, Marcelo Fernandes, Marcondes Alexandre, Roberto Fonseca, Rodrigo Crespi

And of course, we'd like to thank the authors of all upstream dependencies. Please see a full list in the [ThirdPartyNotices.txt](https://raw.githubusercontent.com/Microsoft/azuredatastudio/master/ThirdPartyNotices.txt)

## License

@@ -17,10 +17,12 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
||||
chokidar: https://github.com/paulmillr/chokidar
|
||||
comment-json: https://github.com/kaelzhang/node-comment-json
|
||||
core-js: https://github.com/zloirock/core-js
|
||||
decompress: https://github.com/kevva/decompress
|
||||
emmet: https://github.com/emmetio/emmet
|
||||
error-ex: https://github.com/Qix-/node-error-ex
|
||||
escape-string-regexp: https://github.com/sindresorhus/escape-string-regexp
|
||||
fast-plist: https://github.com/Microsoft/node-fast-plist
|
||||
figures: https://github.com/sindresorhus/figures
|
||||
find-remove: https://www.npmjs.com/package/find-remove
|
||||
fs-extra: https://github.com/jprichardson/node-fs-extra
|
||||
gc-signals: https://github.com/Microsoft/node-gc-signals
|
||||
@@ -34,28 +36,34 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
||||
jquery-ui: https://github.com/jquery/jquery-ui
|
||||
jquery.event.drag: https://github.com/devongovett/jquery.event.drag
|
||||
jschardet: https://github.com/aadsm/jschardet
|
||||
JupyterLab: https://github.com/jupyterlab/jupyterlab
|
||||
make-error: https://github.com/JsCommunity/make-error
|
||||
minimist: https://github.com/substack/minimist
|
||||
moment: https://github.com/moment/moment
|
||||
native-keymap: https://github.com/Microsoft/node-native-keymap
|
||||
native-watchdog: https://github.com/Microsoft/node-native-watchdog
|
||||
ng2-charts: https://github.com/valor-software/ng2-charts
|
||||
node-fetch: https://github.com/bitinn/node-fetch
|
||||
node-pty: https://github.com/Tyriar/node-pty
|
||||
nsfw: https://github.com/Axosoft/nsfw
|
||||
pretty-data: https://github.com/vkiryukhin/pretty-data
|
||||
primeng: https://github.com/primefaces/primeng
|
||||
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
|
||||
pty.js: https://github.com/chjj/pty.js
|
||||
reflect-metadata: https://github.com/rbuckton/reflect-metadata
|
||||
request: https://github.com/request/request
|
||||
rxjs: https://github.com/ReactiveX/RxJS
|
||||
semver: https://github.com/npm/node-semver
|
||||
slickgrid: https://github.com/6pac/SlickGrid
|
||||
sqltoolsservice: https://github.com/Microsoft/sqltoolsservice
|
||||
svg.js: https://github.com/svgdotjs/svg.js
|
||||
systemjs: https://github.com/systemjs/systemjs
|
||||
temp-write: https://github.com/sindresorhus/temp-write
|
||||
underscore: https://github.com/jashkenas/underscore
|
||||
v8-profiler: https://github.com/node-inspector/v8-profiler
|
||||
vscode: https://github.com/microsoft/vscode
|
||||
vscode-debugprotocol: https://github.com/Microsoft/vscode-debugadapter-node
|
||||
vscode-languageclient: https://github.com/Microsoft/vscode-languageserver-node
|
||||
vscode-nls: https://github.com/Microsoft/vscode-nls
|
||||
vscode-ripgrep: https://github.com/roblourens/vscode-ripgrep
|
||||
vscode-textmate: https://github.com/Microsoft/vscode-textmate
|
||||
winreg: https://github.com/fresc81/node-winreg
|
||||
@@ -63,10 +71,9 @@ expressly granted herein, whether by implication, estoppel or otherwise.
|
||||
yauzl: https://github.com/thejoshwolfe/yauzl
|
||||
zone.js: https://www.npmjs.com/package/zone
|
||||
|
||||
Microsoft PROSE SDK: https://microsoft.github.io/prose
|
||||
|
||||
%% angular NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License
|
||||
|
||||
Copyright (c) 2014-2017 Google, Inc. http://angular.io
|
||||
|
||||
@@ -292,6 +299,20 @@ THE SOFTWARE.
|
||||
=========================================
|
||||
END OF core-js NOTICES AND INFORMATION
|
||||
|
||||
%% decompress NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
MIT License
|
||||
|
||||
Copyright (c) Kevin Mårtensson <kevinmartensson@gmail.com> (github.com/kevva)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
=========================================
|
||||
END OF decompress NOTICES AND INFORMATION
|
||||
|
||||
%% emmet NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
@@ -321,32 +342,6 @@ END OF emmet NOTICES AND INFORMATION
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2015 JD Ballard
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
=========================================
|
||||
END OF error-ex NOTICES AND INFORMATION
|
||||
|
||||
%% escape-string-regexp NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
@@ -393,6 +388,20 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
|
||||
=========================================
|
||||
END OF fast-plist NOTICES AND INFORMATION
|
||||
|
||||
%% figures NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
=========================================
|
||||
END OF figures NOTICES AND INFORMATION
|
||||
|
||||
%% fs-extra NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
(The MIT License)
|
||||
@@ -1166,6 +1175,43 @@ That's all there is to it!
|
||||
=========================================
|
||||
END OF jschardet NOTICES AND INFORMATION
|
||||
|
||||
%% JupyterLab NOTICES AND INFORMATION BEGIN HERE
|
||||
Copyright (c) 2015 Project Jupyter Contributors
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright notice, this
|
||||
list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
||||
this list of conditions and the following disclaimer in the documentation
|
||||
and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
Semver File License
|
||||
===================
|
||||
|
||||
The semver.py file is from https://github.com/podhmo/python-semver
|
||||
which is licensed under the "MIT" license. See the semver.py file for details.
|
||||
|
||||
END OF JupyterLab NOTICES AND INFORMATION
|
||||
|
||||
%% make-error NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
ISC © Julien Fontanet
|
||||
@@ -1297,6 +1343,32 @@ SOFTWARE.
|
||||
=========================================
|
||||
END OF ng2-charts NOTICES AND INFORMATION
|
||||
|
||||
%% node-fetch NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2016 David Frank
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
=========================================
|
||||
END OF node-fetch NOTICES AND INFORMATION
|
||||
|
||||
%% node-pty NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
|
||||
@@ -1347,16 +1419,6 @@ SOFTWARE.
|
||||
=========================================
|
||||
END OF nsfw NOTICES AND INFORMATION
|
||||
|
||||
%% pretty-data NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
License: Dual licensed under the MIT and GPL licenses:
|
||||
|
||||
http://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
http://www.gnu.org/licenses/gpl.html
|
||||
=========================================
|
||||
END OF pretty-data NOTICES AND INFORMATION
|
||||
|
||||
%% primeng NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
@@ -1371,6 +1433,30 @@ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLI
|
||||
=========================================
|
||||
END OF primeng NOTICES AND INFORMATION
|
||||
|
||||
%% process-nextick-args NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
# Copyright (c) 2015 Calvin Metcalf
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.**
|
||||
=========================================
|
||||
END OF process-nextick-args NOTICES AND INFORMATION
|
||||
|
||||
%% pty.js NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
Copyright (c) 2012-2015, Christopher Jeffrey (https://github.com/chjj/)
|
||||
@@ -1455,6 +1541,66 @@ END OF TERMS AND CONDITIONS
|
||||
=========================================
|
||||
END OF reflect-metadata NOTICES AND INFORMATION
|
||||
|
||||
%% request NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
Apache License
|
||||
|
||||
Version 2.0, January 2004
|
||||
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
|
||||
|
||||
You must give any other recipients of the Work or Derivative Works a copy of this License; and
|
||||
|
||||
You must cause any modified files to carry prominent notices stating that You changed the files; and
|
||||
|
||||
You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
|
||||
|
||||
If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
=========================================
|
||||
END OF request NOTICES AND INFORMATION
|
||||
|
||||
%% rxjs NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
Apache License
|
||||
@@ -1780,6 +1926,20 @@ ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEAL
|
||||
=========================================
|
||||
END OF systemjs NOTICES AND INFORMATION
|
||||
|
||||
%% temp-write NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
MIT License
|
||||
|
||||
Copyright (c) Sindre Sorhus <sindresorhus@gmail.com> (sindresorhus.com)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
=========================================
|
||||
END OF temp-write NOTICES AND INFORMATION
|
||||
|
||||
%% underscore NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
Copyright (c) 2009-2017 Jeremy Ashkenas, DocumentCloud and Investigative
|
||||
@@ -1882,6 +2042,50 @@ OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWA
|
||||
=========================================
|
||||
END OF vscode-debugprotocol NOTICES AND INFORMATION
|
||||
|
||||
%% vscode-languageclient NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
Copyright (c) Microsoft Corporation
|
||||
|
||||
All rights reserved.
|
||||
|
||||
MIT License
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
|
||||
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
|
||||
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
=========================================
|
||||
END OF vscode-languageclient NOTICES AND INFORMATION
|
||||
|
||||
%% vscode-nls NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Microsoft Corporation
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation
|
||||
files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy,
|
||||
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
|
||||
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
|
||||
BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT
|
||||
OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
=========================================
|
||||
END OF vscode-nls NOTICES AND INFORMATION
|
||||
|
||||
%% vscode-ripgrep NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
vscode-ripgrep
|
||||
@@ -2041,3 +2245,187 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
=========================================
|
||||
END OF zone.js NOTICES AND INFORMATION
|
||||
|
||||
%% Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
NOTICES AND INFORMATION
|
||||
Do Not Translate or Localize
|
||||
|
||||
This software incorporates material from third parties. Microsoft makes certain
|
||||
open source code available at http://3rdpartysource.microsoft.com, or you may
|
||||
send a check or money order for US $5.00, including the product name, the open
|
||||
source component name, and version number, to:
|
||||
|
||||
Source Code Compliance Team
|
||||
Microsoft Corporation
|
||||
One Microsoft Way
|
||||
Redmond, WA 98052
|
||||
USA
|
||||
|
||||
Notwithstanding any other terms, you may reverse engineer this software to the
|
||||
extent required to debug changes to any libraries licensed under the GNU Lesser
|
||||
General Public License.
|
||||
|
||||
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
|
||||
|
||||
===================================CoreFx (BEGIN)
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) .NET Foundation and Contributors
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
===================================CoreFx (END)
|
||||
|
||||
===================================CoreFxLab (BEGIN)
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) Microsoft Corporation
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
===================================CoreFxLab (END)
|
||||
|
||||
===================================Reactive Extensions (BEGIN)
|
||||
Copyright (c) .NET Foundation and Contributors
|
||||
All Rights Reserved
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License"); you
|
||||
may not use this file except in compliance with the License. You may
|
||||
obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied. See the License for the specific language governing permissions
|
||||
and limitations under the License.
|
||||
|
||||
List of contributors to the Rx libraries
|
||||
|
||||
Rx and Ix.NET:
|
||||
Wes Dyer
|
||||
Jeffrey van Gogh
|
||||
Matthew Podwysocki
|
||||
Bart De Smet
|
||||
Danny van Velzen
|
||||
Erik Meijer
|
||||
Brian Beckman
|
||||
Aaron Lahman
|
||||
Georgi Chkodrov
|
||||
Arthur Watson
|
||||
Gert Drapers
|
||||
Mark Shields
|
||||
Eric Rozell
|
||||
|
||||
Rx.js and Ix.js:
|
||||
Matthew Podwysocki
|
||||
Jeffrey van Gogh
|
||||
Bart De Smet
|
||||
Brian Beckman
|
||||
Wes Dyer
|
||||
Erik Meijer
|
||||
|
||||
Tx:
|
||||
Georgi Chkodrov
|
||||
Bart De Smet
|
||||
Aaron Lahman
|
||||
Erik Meijer
|
||||
Brian Grunkemeyer
|
||||
Beysim Sezgin
|
||||
Tiho Tarnavski
|
||||
Collin Meek
|
||||
Sajay Anthony
|
||||
Karen Albrecht
|
||||
John Allen
|
||||
Zach Kramer
|
||||
|
||||
Rx++ and Ix++:
|
||||
Aaron Lahman
|
||||
===================================Reactive Extensions (END)
|
||||
|
||||
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
|
||||
=========================================
|
||||
END OF Microsoft.ProgramSynthesis.Common NOTICES AND INFORMATION
|
||||
|
||||
%% Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION BEGIN HERE
|
||||
=========================================
|
||||
NOTICES AND INFORMATION
|
||||
Do Not Translate or Localize
|
||||
|
||||
This software incorporates material from third parties. Microsoft makes certain
|
||||
open source code available at http://3rdpartysource.microsoft.com, or you may
|
||||
send a check or money order for US $5.00, including the product name, the open
|
||||
source component name, and version number, to:
|
||||
|
||||
Source Code Compliance Team
|
||||
Microsoft Corporation
|
||||
One Microsoft Way
|
||||
Redmond, WA 98052
|
||||
USA
|
||||
|
||||
Notwithstanding any other terms, you may reverse engineer this software to the
|
||||
extent required to debug changes to any libraries licensed under the GNU Lesser
|
||||
General Public License.
|
||||
|
||||
-------------------------------START OF THIRD-PARTY NOTICES-------------------------------------------
|
||||
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 ExcelDataReader
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
===================================ExcelDataReader (END)
|
||||
|
||||
-------------------------------END OF THIRD-PARTY NOTICES-------------------------------------------
|
||||
=========================================
|
||||
END OF Microsoft.ProgramSynthesis.Detection NOTICES AND INFORMATION
|
||||
|
||||
19  appveyor.yml
@@ -1,19 +0,0 @@
|
||||
environment:
|
||||
ELECTRON_RUN_AS_NODE: 1
|
||||
VSCODE_BUILD_VERBOSE: true
|
||||
|
||||
cache:
|
||||
- '%LOCALAPPDATA%\Yarn\cache'
|
||||
|
||||
install:
|
||||
- ps: Install-Product node 8.9.1 x64
|
||||
|
||||
build_script:
|
||||
- yarn
|
||||
- .\node_modules\.bin\gulp electron
|
||||
- npm run compile
|
||||
|
||||
test_script:
|
||||
- node --version
|
||||
- .\scripts\test.bat
|
||||
- .\scripts\test-integration.bat
|
||||
66  azure-pipelines-linux-mac.yml  Normal file
@@ -0,0 +1,66 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: '10.15.1'
|
||||
displayName: 'Install Node.js'
|
||||
|
||||
- script: |
|
||||
npm i -g yarn
|
||||
displayName: 'preinstall'
|
||||
|
||||
- script: |
|
||||
export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:10
|
||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
sudo chmod +x /etc/init.d/xvfb
|
||||
sudo update-rc.d xvfb defaults
|
||||
sudo service xvfb start
|
||||
# sh -e /etc/init.d/xvfb start
|
||||
# sleep 3
|
||||
displayName: 'Linux preinstall'
|
||||
condition: eq(variables['Agent.OS'], 'Linux')
|
||||
|
||||
- script: |
|
||||
yarn
|
||||
displayName: 'Install'
|
||||
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
displayName: Download Electron
|
||||
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
|
||||
- script: |
|
||||
yarn tslint
|
||||
displayName: 'Run TSLint'
|
||||
|
||||
- script: |
|
||||
yarn strict-null-check
|
||||
displayName: 'Run Strict Null Check'
|
||||
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: 'Compile'
|
||||
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter
|
||||
displayName: 'Tests'
|
||||
condition: and(succeeded(), eq(variables['Agent.OS'], 'Linux'))
|
||||
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test.sh --reporter mocha-junit-reporter --coverage
|
||||
displayName: 'Tests'
|
||||
condition: and(succeeded(), ne(variables['Agent.OS'], 'Linux'))
|
||||
|
||||
- task: PublishTestResults@2
|
||||
inputs:
|
||||
testResultsFiles: '**/test-results.xml'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: PublishCodeCoverageResults@1
|
||||
inputs:
|
||||
codeCoverageTool: 'cobertura'
|
||||
summaryFileLocation: $(System.DefaultWorkingDirectory)/.build/coverage/cobertura-coverage.xml
|
||||
reportDirectory: $(System.DefaultWorkingDirectory)/.build/coverage/lcov-reports
|
||||
condition: ne(variables['Agent.OS'], 'Linux')
|
||||
44  azure-pipelines-windows.yml  Normal file
@@ -0,0 +1,44 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: '10.15.1'
|
||||
displayName: 'Install Node.js'
|
||||
|
||||
- script: |
|
||||
yarn
|
||||
displayName: 'Yarn Install'
|
||||
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
displayName: 'Electron'
|
||||
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
|
||||
- script: |
|
||||
yarn tslint
|
||||
displayName: 'Run TSLint'
|
||||
|
||||
- script: |
|
||||
yarn strict-null-check
|
||||
displayName: 'Run Strict Null Check'
|
||||
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: 'Compile'
|
||||
|
||||
- script: |
|
||||
.\scripts\test.bat --reporter mocha-junit-reporter --coverage
|
||||
displayName: 'Test'
|
||||
|
||||
- task: PublishTestResults@2
|
||||
inputs:
|
||||
testResultsFiles: 'test-results.xml'
|
||||
condition: succeededOrFailed()
|
||||
|
||||
- task: PublishCodeCoverageResults@1
|
||||
inputs:
|
||||
codeCoverageTool: 'cobertura'
|
||||
summaryFileLocation: $(System.DefaultWorkingDirectory)\.build\coverage\cobertura-coverage.xml
|
||||
reportDirectory: $(System.DefaultWorkingDirectory)\.build\coverage\lcov-report
|
||||
29  azure-pipelines.yml  Normal file
@@ -0,0 +1,29 @@
trigger:
- master
- releases/*

jobs:

# All tasks on Windows
- job: build_all_windows
  displayName: Build all tasks (Windows)
  pool:
    vmImage: vs2017-win2016
  steps:
  - template: azure-pipelines-windows.yml

# All tasks on Linux
- job: build_all_linux
  displayName: Build all tasks (Linux)
  pool:
    vmImage: 'Ubuntu 16.04'
  steps:
  - template: azure-pipelines-linux-mac.yml

# All tasks on macOS
- job: build_all_darwin
  displayName: Build all tasks (macOS)
  pool:
    vmImage: macos-10.13
  steps:
  - template: azure-pipelines-linux-mac.yml
126  build/.nativeignore  Normal file
@@ -0,0 +1,126 @@
|
||||
# cleanup rules for native node modules, .gitignore style
|
||||
|
||||
fsevents/binding.gyp
|
||||
fsevents/fsevents.cc
|
||||
fsevents/build/**
|
||||
fsevents/src/**
|
||||
fsevents/test/**
|
||||
!fsevents/**/*.node
|
||||
|
||||
vscode-sqlite3/binding.gyp
|
||||
vscode-sqlite3/benchmark/**
|
||||
vscode-sqlite3/cloudformation/**
|
||||
vscode-sqlite3/deps/**
|
||||
vscode-sqlite3/test/**
|
||||
vscode-sqlite3/build/**
|
||||
vscode-sqlite3/src/**
|
||||
!vscode-sqlite3/build/Release/*.node
|
||||
|
||||
oniguruma/binding.gyp
|
||||
oniguruma/build/**
|
||||
oniguruma/src/**
|
||||
oniguruma/deps/**
|
||||
!oniguruma/build/Release/*.node
|
||||
!oniguruma/src/*.js
|
||||
|
||||
windows-mutex/binding.gyp
|
||||
windows-mutex/build/**
|
||||
windows-mutex/src/**
|
||||
!windows-mutex/**/*.node
|
||||
|
||||
native-keymap/binding.gyp
|
||||
native-keymap/build/**
|
||||
native-keymap/src/**
|
||||
native-keymap/deps/**
|
||||
!native-keymap/build/Release/*.node
|
||||
|
||||
native-is-elevated/binding.gyp
|
||||
native-is-elevated/build/**
|
||||
native-is-elevated/src/**
|
||||
native-is-elevated/deps/**
|
||||
!native-is-elevated/build/Release/*.node
|
||||
|
||||
native-watchdog/binding.gyp
|
||||
native-watchdog/build/**
|
||||
native-watchdog/src/**
|
||||
!native-watchdog/build/Release/*.node
|
||||
|
||||
spdlog/binding.gyp
|
||||
spdlog/build/**
|
||||
spdlog/deps/**
|
||||
spdlog/src/**
|
||||
spdlog/test/**
|
||||
!spdlog/build/Release/*.node
|
||||
|
||||
jschardet/dist/**
|
||||
|
||||
windows-foreground-love/binding.gyp
|
||||
windows-foreground-love/build/**
|
||||
windows-foreground-love/src/**
|
||||
!windows-foreground-love/**/*.node
|
||||
|
||||
windows-process-tree/binding.gyp
|
||||
windows-process-tree/build/**
|
||||
windows-process-tree/src/**
|
||||
!windows-process-tree/**/*.node
|
||||
|
||||
gc-signals/binding.gyp
|
||||
gc-signals/build/**
|
||||
gc-signals/src/**
|
||||
gc-signals/deps/**
|
||||
|
||||
!gc-signals/build/Release/*.node
|
||||
!gc-signals/src/index.js
|
||||
|
||||
keytar/binding.gyp
|
||||
keytar/build/**
|
||||
keytar/src/**
|
||||
keytar/script/**
|
||||
keytar/node_modules/**
|
||||
!keytar/**/*.node
|
||||
|
||||
node-pty/binding.gyp
|
||||
node-pty/build/**
|
||||
node-pty/src/**
|
||||
node-pty/tools/**
|
||||
!node-pty/build/Release/*.exe
|
||||
!node-pty/build/Release/*.dll
|
||||
!node-pty/build/Release/*.node
|
||||
|
||||
chart.js/node_modules/**
|
||||
|
||||
emmet/node_modules/**
|
||||
|
||||
pty.js/build/**
|
||||
!pty.js/build/Release/**
|
||||
|
||||
jquery-ui/external/**
|
||||
jquery-ui/demos/**
|
||||
|
||||
core-js/**/**
|
||||
|
||||
slickgrid/node_modules/**
|
||||
slickgrid/examples/**
|
||||
|
||||
vscode-nsfw/binding.gyp
|
||||
vscode-nsfw/build/**
|
||||
vscode-nsfw/src/**
|
||||
vscode-nsfw/openpa/**
|
||||
vscode-nsfw/includes/**
|
||||
!vscode-nsfw/build/Release/*.node
|
||||
!vscode-nsfw/**/*.a
|
||||
|
||||
vsda/binding.gyp
|
||||
vsda/README.md
|
||||
vsda/build/**
|
||||
vsda/*.bat
|
||||
vsda/*.sh
|
||||
vsda/*.cpp
|
||||
vsda/*.h
|
||||
!vsda/build/Release/vsda.node
|
||||
|
||||
vscode-windows-ca-certs/**/*
|
||||
!vscode-windows-ca-certs/package.json
|
||||
!vscode-windows-ca-certs/**/*.node
|
||||
|
||||
node-addon-api/**/*
|
||||
20  build/azure-pipelines/common/installDistro.ts  Normal file
@@ -0,0 +1,20 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as cp from 'child_process';
import * as path from 'path';

function yarnInstall(packageName: string): void {
	cp.execSync(`yarn add --no-lockfile ${packageName}`);
	cp.execSync(`yarn add --no-lockfile ${packageName}`, { cwd: path.join( process.cwd(), 'remote') });
}

const product = require('../../../product.json');
const dependencies = product.dependencies || {} as { [name: string]: string; };

Object.keys(dependencies).forEach(name => {
	const url = dependencies[name];
	yarnInstall(url);
});
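For readers skimming the diff: this new script installs the optional distro dependencies listed in `product.json` into both the repository root and the `remote` folder. A minimal sketch of the data it expects follows; the package name and URL below are made up purely for illustration and are not part of the repository.

```ts
// Hypothetical example only: the fragment of product.json that installDistro.ts reads.
// Keys are package names; each value is the spec handed to `yarn add --no-lockfile <spec>`.
const exampleProduct: { dependencies?: { [name: string]: string } } = {
	dependencies: {
		'example-distro-service': 'https://example.com/example-distro-service-1.0.0.tgz'
	}
};

// The script loops over the map and runs the install once in the repo root
// and once in ./remote for every entry.
Object.keys(exampleProduct.dependencies || {}).forEach(name => {
	console.log(`would run: yarn add --no-lockfile ${exampleProduct.dependencies![name]}`);
});
```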
@@ -6,7 +6,6 @@
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import { execSync } from 'child_process';
|
||||
import { Readable } from 'stream';
|
||||
import * as crypto from 'crypto';
|
||||
import * as azure from 'azure-storage';
|
||||
@@ -44,7 +43,7 @@ function createDefaultConfig(quality: string): Config {
|
||||
}
|
||||
|
||||
function getConfig(quality: string): Promise<Config> {
|
||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = 'dbs/builds/colls/config';
|
||||
const query = {
|
||||
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
|
||||
@@ -66,7 +65,7 @@ interface Asset {
|
||||
platform: string;
|
||||
type: string;
|
||||
url: string;
|
||||
mooncakeUrl: string;
|
||||
mooncakeUrl?: string;
|
||||
hash: string;
|
||||
sha256hash: string;
|
||||
size: number;
|
||||
@@ -74,7 +73,7 @@ interface Asset {
|
||||
}
|
||||
|
||||
function createOrUpdate(commit: string, quality: string, platform: string, type: string, release: NewDocument, asset: Asset, isUpdate: boolean): Promise<void> {
|
||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = 'dbs/builds/colls/' + quality;
|
||||
const updateQuery = {
|
||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
||||
@@ -128,7 +127,7 @@ async function assertContainer(blobService: azure.BlobService, quality: string):
|
||||
await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
|
||||
}
|
||||
|
||||
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean> {
|
||||
async function doesAssetExist(blobService: azure.BlobService, quality: string, blobName: string): Promise<boolean | undefined> {
|
||||
const existsResult = await new Promise<azure.BlobService.BlobResult>((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
|
||||
return existsResult.exists;
|
||||
}
|
||||
@@ -151,11 +150,15 @@ interface PublishOptions {
async function publish(commit: string, quality: string, platform: string, type: string, name: string, version: string, _isUpdate: string, file: string, opts: PublishOptions): Promise<void> {
	const isUpdate = _isUpdate === 'true';

	const queuedBy = process.env['BUILD_QUEUEDBY'];
	const sourceBranch = process.env['BUILD_SOURCEBRANCH'];
	const isReleased = quality === 'insider'
		&& /^master$|^refs\/heads\/master$/.test(sourceBranch)
		&& /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy);
	const queuedBy = process.env['BUILD_QUEUEDBY']!;
	const sourceBranch = process.env['BUILD_SOURCEBRANCH']!;
	const isReleased = (
		// Insiders: nightly build from master
		(quality === 'insider' && /^master$|^refs\/heads\/master$/.test(sourceBranch) && /Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy)) ||

		// Exploration: any build from electron-4.0.x branch
		(quality === 'exploration' && /^electron-4.0.x$|^refs\/heads\/electron-4.0.x$/.test(sourceBranch))
	);

	console.log('Publishing...');
	console.log('Quality:', quality);
@@ -180,62 +183,23 @@ async function publish(commit: string, quality: string, platform: string, type:
|
||||
console.log('SHA256:', sha256hash);
|
||||
|
||||
const blobName = commit + '/' + name;
|
||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
|
||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
|
||||
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
await assertContainer(blobService, quality);
|
||||
|
||||
const blobExists = await doesAssetExist(blobService, quality, blobName);
|
||||
|
||||
const promises = [];
|
||||
|
||||
if (!blobExists) {
|
||||
promises.push(uploadBlob(blobService, quality, blobName, file));
|
||||
}
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
if (process.env['MOONCAKE_STORAGE_ACCESS_KEY']) {
|
||||
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
// mooncake is fussy and far away, this is needed!
|
||||
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
|
||||
await Promise.all([
|
||||
assertContainer(blobService, quality),
|
||||
assertContainer(mooncakeBlobService, quality)
|
||||
]);
|
||||
|
||||
const [blobExists, mooncakeBlobExists] = await Promise.all([
|
||||
doesAssetExist(blobService, quality, blobName),
|
||||
doesAssetExist(mooncakeBlobService, quality, blobName)
|
||||
]);
|
||||
|
||||
const promises = [];
|
||||
|
||||
if (!blobExists) {
|
||||
promises.push(uploadBlob(blobService, quality, blobName, file));
|
||||
}
|
||||
|
||||
if (!mooncakeBlobExists) {
|
||||
promises.push(uploadBlob(mooncakeBlobService, quality, blobName, file));
|
||||
}
|
||||
} else {
|
||||
console.log('Skipping Mooncake publishing.');
|
||||
}
|
||||
|
||||
|
||||
if (promises.length === 0) {
|
||||
if (blobExists) {
|
||||
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('Uploading blobs to Azure storage...');
|
||||
|
||||
await Promise.all(promises);
|
||||
await uploadBlob(blobService, quality, blobName, file);
|
||||
|
||||
console.log('Blobs successfully uploaded.');
|
||||
|
||||
@@ -247,8 +211,6 @@ async function publish(commit: string, quality: string, platform: string, type:
|
||||
platform: platform,
|
||||
type: type,
|
||||
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
|
||||
// {{SQL CARBON EDIT}}
|
||||
mooncakeUrl: process.env['MOONCAKE_CDN_URL'] ? `${process.env['MOONCAKE_CDN_URL']}/${quality}/${blobName}` : undefined,
|
||||
hash: sha1hash,
|
||||
sha256hash,
|
||||
size
|
||||
@@ -268,7 +230,7 @@ async function publish(commit: string, quality: string, platform: string, type:
|
||||
isReleased: config.frozen ? false : isReleased,
|
||||
sourceBranch,
|
||||
queuedBy,
|
||||
assets: [],
|
||||
assets: [] as Array<Asset>,
|
||||
updates: {} as any
|
||||
};
|
||||
|
||||
@@ -284,15 +246,23 @@ async function publish(commit: string, quality: string, platform: string, type:
|
||||
}
|
||||
|
||||
function main(): void {
|
||||
if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
|
||||
console.warn('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
|
||||
return;
|
||||
}
|
||||
|
||||
const commit = process.env['BUILD_SOURCEVERSION'];
|
||||
|
||||
if (!commit) {
|
||||
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
|
||||
return;
|
||||
}
|
||||
|
||||
const opts = minimist<PublishOptions>(process.argv.slice(2), {
|
||||
boolean: ['upload-only']
|
||||
});
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
let [quality, platform, type, name, version, _isUpdate, file, commit] = opts._;
|
||||
if (!commit) {
|
||||
commit = execSync('git rev-parse HEAD', { encoding: 'utf8' }).trim();
|
||||
}
|
||||
const [quality, platform, type, name, version, _isUpdate, file] = opts._;
|
||||
|
||||
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
|
||||
console.error(err);
|
||||
@@ -97,7 +97,7 @@ function updateVersion(accessor: IVersionAccessor, symbolsPath: string) {
|
||||
|
||||
function asyncRequest<T>(options: request.UrlOptions & request.CoreOptions): Promise<T> {
|
||||
return new Promise<T>((resolve, reject) => {
|
||||
request(options, (error, response, body) => {
|
||||
request(options, (error, _response, body) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
} else {
|
||||
@@ -107,17 +107,17 @@ function asyncRequest<T>(options: request.UrlOptions & request.CoreOptions): Pro
|
||||
});
|
||||
}
|
||||
|
||||
function downloadAsset(repository, assetName: string, targetPath: string, electronVersion: string) {
|
||||
function downloadAsset(repository: any, assetName: string, targetPath: string, electronVersion: string) {
|
||||
return new Promise((resolve, reject) => {
|
||||
repository.getReleases({ tag_name: `v${electronVersion}` }, (err, releases) => {
|
||||
repository.getReleases({ tag_name: `v${electronVersion}` }, (err: any, releases: any) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
const asset = releases[0].assets.filter(asset => asset.name === assetName)[0];
|
||||
const asset = releases[0].assets.filter((asset: any) => asset.name === assetName)[0];
|
||||
if (!asset) {
|
||||
reject(new Error(`Asset with name ${assetName} not found`));
|
||||
} else {
|
||||
repository.downloadAsset(asset, (err, reader) => {
|
||||
repository.downloadAsset(asset, (err: any, reader: any) => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
} else {
|
||||
@@ -156,7 +156,7 @@ async function ensureVersionAndSymbols(options: IOptions) {
|
||||
const symbolsName = symbolsZipName(options.platform, options.versions.electron, options.versions.insiders);
|
||||
const symbolsPath = await tmpFile('symbols.zip');
|
||||
console.log(`HockeyApp: downloading symbols ${symbolsName} for electron ${options.versions.electron} (${options.platform}) into ${symbolsPath}`);
|
||||
await downloadAsset(new github({ repo: options.repository, token: options.access.githubToken }), symbolsName, symbolsPath, options.versions.electron);
|
||||
await downloadAsset(new (github as any)({ repo: options.repository, token: options.access.githubToken }), symbolsName, symbolsPath, options.versions.electron);
|
||||
|
||||
// Create version
|
||||
console.log(`HockeyApp: creating new version ${options.versions.code} (${options.platform})`);
|
||||
build/azure-pipelines/common/sync-mooncake.ts (new file, 176 lines)
@@ -0,0 +1,176 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as url from 'url';
|
||||
import * as azure from 'azure-storage';
|
||||
import * as mime from 'mime';
|
||||
import { DocumentClient, RetrievedDocument } from 'documentdb';
|
||||
|
||||
function log(...args: any[]) {
|
||||
console.log(...[`[${new Date().toISOString()}]`, ...args]);
|
||||
}
|
||||
|
||||
function error(...args: any[]) {
|
||||
console.error(...[`[${new Date().toISOString()}]`, ...args]);
|
||||
}
|
||||
|
||||
if (process.argv.length < 3) {
|
||||
error('Usage: node sync-mooncake.js <quality>');
|
||||
process.exit(-1);
|
||||
}
|
||||
|
||||
interface Build extends RetrievedDocument {
|
||||
assets: Asset[];
|
||||
}
|
||||
|
||||
interface Asset {
|
||||
platform: string;
|
||||
type: string;
|
||||
url: string;
|
||||
mooncakeUrl: string;
|
||||
hash: string;
|
||||
sha256hash: string;
|
||||
size: number;
|
||||
supportsFastUpdate?: boolean;
|
||||
}
|
||||
|
||||
function updateBuild(commit: string, quality: string, platform: string, type: string, asset: Asset): Promise<void> {
|
||||
const client = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = 'dbs/builds/colls/' + quality;
|
||||
const updateQuery = {
|
||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
||||
parameters: [{ name: '@id', value: commit }]
|
||||
};
|
||||
|
||||
let updateTries = 0;
|
||||
|
||||
function _update(): Promise<void> {
|
||||
updateTries++;
|
||||
|
||||
return new Promise<void>((c, e) => {
|
||||
client.queryDocuments(collection, updateQuery).toArray((err, results) => {
|
||||
if (err) { return e(err); }
|
||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
||||
|
||||
const release = results[0];
|
||||
|
||||
release.assets = [
|
||||
...release.assets.filter((a: any) => !(a.platform === platform && a.type === type)),
|
||||
asset
|
||||
];
|
||||
|
||||
client.replaceDocument(release._self, release, err => {
|
||||
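// replaceDocument returns 409 when a concurrent writer has changed the document;
// in that case the query-and-replace is re-run, up to five attempts in total.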
if (err && err.code === 409 && updateTries < 5) { return c(_update()); }
|
||||
if (err) { return e(err); }
|
||||
|
||||
log('Build successfully updated.');
|
||||
c();
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
return _update();
|
||||
}
|
||||
|
||||
async function sync(commit: string, quality: string): Promise<void> {
|
||||
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
|
||||
|
||||
const cosmosdb = new DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT']!, { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = `dbs/builds/colls/${quality}`;
|
||||
const query = {
|
||||
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
|
||||
parameters: [{ name: '@id', value: commit }]
|
||||
};
|
||||
|
||||
const build = await new Promise<Build>((c, e) => {
|
||||
cosmosdb.queryDocuments(collection, query).toArray((err, results) => {
|
||||
if (err) { return e(err); }
|
||||
if (results.length !== 1) { return e(new Error('No documents')); }
|
||||
c(results[0] as Build);
|
||||
});
|
||||
});
|
||||
|
||||
log(`Found build for ${commit}, with ${build.assets.length} assets`);
|
||||
|
||||
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2']!;
|
||||
|
||||
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2']!)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY']!, `${storageAccount}.blob.core.chinacloudapi.cn`)
|
||||
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
|
||||
|
||||
// mooncake is fussy and far away, this is needed!
|
||||
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
|
||||
|
||||
for (const asset of build.assets) {
|
||||
try {
|
||||
const blobPath = url.parse(asset.url).path;
|
||||
|
||||
if (!blobPath) {
|
||||
throw new Error(`Failed to parse URL: ${asset.url}`);
|
||||
}
|
||||
|
||||
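// Drop the leading container segment of the URL path to obtain the blob name.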
const blobName = blobPath.replace(/^\/\w+\//, '');
|
||||
|
||||
log(`Found ${blobName}`);
|
||||
|
||||
if (asset.mooncakeUrl) {
|
||||
log(` Already in Mooncake ✔️`);
|
||||
continue;
|
||||
}
|
||||
|
||||
const readStream = blobService.createReadStream(quality, blobName, undefined!);
|
||||
const blobOptions: azure.BlobService.CreateBlockBlobRequestOptions = {
|
||||
contentSettings: {
|
||||
contentType: mime.lookup(blobPath),
|
||||
cacheControl: 'max-age=31536000, public'
|
||||
}
|
||||
};
|
||||
|
||||
const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
|
||||
|
||||
log(` Uploading to Mooncake...`);
|
||||
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
|
||||
|
||||
log(` Updating build in DB...`);
|
||||
asset.mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
|
||||
await updateBuild(commit, quality, asset.platform, asset.type, asset);
|
||||
|
||||
log(` Done ✔️`);
|
||||
} catch (err) {
|
||||
error(err);
|
||||
}
|
||||
}
|
||||
|
||||
log(`All done ✔️`);
|
||||
}
|
||||
|
||||
function main(): void {
|
||||
if (process.env['VSCODE_BUILD_SKIP_PUBLISH']) {
|
||||
error('Skipping publish due to VSCODE_BUILD_SKIP_PUBLISH');
|
||||
return;
|
||||
}
|
||||
|
||||
const commit = process.env['BUILD_SOURCEVERSION'];
|
||||
|
||||
if (!commit) {
|
||||
error('Skipping publish due to missing BUILD_SOURCEVERSION');
|
||||
return;
|
||||
}
|
||||
|
||||
const quality = process.argv[2];
|
||||
|
||||
sync(commit, quality).catch(err => {
|
||||
error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
}
|
||||
|
||||
main();
|
||||
build/azure-pipelines/darwin/build.sh (new executable file, 5 lines)
@@ -0,0 +1,5 @@
#!/usr/bin/env bash
set -e
yarn gulp vscode-darwin-min
yarn gulp vscode-reh-darwin-min
yarn gulp upload-vscode-sourcemaps
build/azure-pipelines/darwin/continuous-build-darwin.yml (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
# inputs:
|
||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: '$(ArtifactFeed)'
|
||||
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
|
||||
- script: |
|
||||
yarn
|
||||
displayName: Install Dependencies
|
||||
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
# inputs:
|
||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: '$(ArtifactFeed)'
|
||||
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
displayName: Download Electron
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
- script: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
- script: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
- script: |
|
||||
./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests
|
||||
- script: |
|
||||
./scripts/test-integration.sh --tfs "Integration Tests"
|
||||
displayName: Run Integration Tests
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
testResultsFiles: '*-results.xml'
|
||||
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
||||
condition: succeededOrFailed()
|
||||
build/azure-pipelines/darwin/product-build-darwin.yml (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
inputs:
|
||||
azureSubscription: 'vscode-builds-subscription'
|
||||
KeyVaultName: vscode
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine monacotools.visualstudio.com
|
||||
password $(devops-pat)
|
||||
machine github.com
|
||||
login vscode
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
|
||||
yarn
|
||||
yarn gulp mixin
|
||||
yarn gulp hygiene
|
||||
yarn monaco-compile-check
|
||||
node build/azure-pipelines/common/installDistro.js
|
||||
node build/lib/builtInExtensions.js
|
||||
displayName: Prepare build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||
./build/azure-pipelines/darwin/build.sh
|
||||
displayName: Build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test.sh --build --tfs "Unit Tests"
|
||||
# APP_NAME="`ls $(agent.builddirectory)/VSCode-darwin | head -n 1`"
|
||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-darwin/$APP_NAME"
|
||||
displayName: Run unit tests
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
./scripts/test-integration.sh --build --tfs "Integration Tests"
|
||||
displayName: Run integration tests
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
pushd ../VSCode-darwin && zip -r -X -y ../VSCode-darwin.zip * && popd
|
||||
displayName: Archive build
|
||||
|
||||
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
|
||||
inputs:
|
||||
ConnectedServiceName: 'ESRP CodeSign'
|
||||
FolderPath: '$(agent.builddirectory)'
|
||||
Pattern: 'VSCode-darwin.zip'
|
||||
signConfigType: inlineSignParams
|
||||
inlineOperation: |
|
||||
[
|
||||
{
|
||||
"keyCode": "CP-401337-Apple",
|
||||
"operationSetCode": "MacAppDeveloperSign",
|
||||
"parameters": [ ],
|
||||
"toolName": "sign",
|
||||
"toolVersion": "1.0"
|
||||
}
|
||||
]
|
||||
SessionTimeout: 120
|
||||
displayName: Codesign
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
|
||||
./build/azure-pipelines/darwin/publish.sh
|
||||
displayName: Publish
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: 'Component Detection'
|
||||
continueOnError: true
|
||||
build/azure-pipelines/darwin/publish.sh (new executable file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
|
||||
# remove pkg from archive
|
||||
zip -d ../VSCode-darwin.zip "*.pkg"
|
||||
|
||||
# publish the build
|
||||
PACKAGEJSON=`ls ../VSCode-darwin/*.app/Contents/Resources/app/package.json`
|
||||
VERSION=`node -p "require(\"$PACKAGEJSON\").version"`
|
||||
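# publish.js positional arguments: <quality> <platform> <type> <name> <version> <isUpdate> <file> (see common/publish.ts)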
node build/azure-pipelines/common/publish.js \
|
||||
"$VSCODE_QUALITY" \
|
||||
darwin \
|
||||
archive \
|
||||
"VSCode-darwin-$VSCODE_QUALITY.zip" \
|
||||
$VERSION \
|
||||
true \
|
||||
../VSCode-darwin.zip
|
||||
|
||||
# package Remote Extension Host
|
||||
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd
|
||||
|
||||
# publish Remote Extension Host
|
||||
node build/azure-pipelines/common/publish.js \
|
||||
"$VSCODE_QUALITY" \
|
||||
server-darwin \
|
||||
archive-unsigned \
|
||||
"vscode-server-darwin.zip" \
|
||||
$VERSION \
|
||||
true \
|
||||
../vscode-server-darwin.zip
|
||||
|
||||
# publish hockeyapp symbols
|
||||
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_MACOS"
|
||||
|
||||
# upload configuration
|
||||
yarn gulp upload-vscode-configuration
|
||||
build/azure-pipelines/distro-build.yml (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
trigger:
|
||||
branches:
|
||||
include: ['master', 'release/*']
|
||||
pr:
|
||||
branches:
|
||||
include: ['master', 'release/*']
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
inputs:
|
||||
azureSubscription: 'vscode-builds-subscription'
|
||||
KeyVaultName: vscode
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine github.com
|
||||
login vscode
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
|
||||
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
|
||||
git fetch distro
|
||||
git push distro origin/master:refs/heads/master
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
|
||||
displayName: Sync & Merge Distro
|
||||
build/azure-pipelines/linux/.gitignore (new file, vendored, 1 line)
@@ -0,0 +1 @@
pat
build/azure-pipelines/linux/build.sh (new executable file, 7 lines)
@@ -0,0 +1,7 @@
#!/usr/bin/env bash
set -e
yarn gulp "vscode-linux-$VSCODE_ARCH-min"

if [[ "$VSCODE_ARCH" != "ia32" ]]; then
	yarn gulp vscode-reh-linux-$VSCODE_ARCH-min
fi
build/azure-pipelines/linux/continuous-build-linux.yml (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
steps:
|
||||
- script: |
|
||||
set -e
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0
|
||||
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
|
||||
sudo chmod +x /etc/init.d/xvfb
|
||||
sudo update-rc.d xvfb defaults
|
||||
sudo service xvfb start
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
# inputs:
|
||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: '$(ArtifactFeed)'
|
||||
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
|
||||
- script: |
|
||||
yarn
|
||||
displayName: Install Dependencies
|
||||
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
# inputs:
|
||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: '$(ArtifactFeed)'
|
||||
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
||||
- script: |
|
||||
yarn gulp electron-x64
|
||||
displayName: Download Electron
|
||||
- script: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
- script: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
- script: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
- script: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
- script: |
|
||||
DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
testResultsFiles: '*-results.xml'
|
||||
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
||||
condition: succeededOrFailed()
|
||||
build/azure-pipelines/linux/frozen-check.js (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const documentdb_1 = require("documentdb");
|
||||
function createDefaultConfig(quality) {
|
||||
return {
|
||||
id: quality,
|
||||
frozen: false
|
||||
};
|
||||
}
|
||||
function getConfig(quality) {
|
||||
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
|
||||
const collection = 'dbs/builds/colls/config';
|
||||
const query = {
|
||||
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
|
||||
parameters: [
|
||||
{ name: '@quality', value: quality }
|
||||
]
|
||||
};
|
||||
return new Promise((c, e) => {
|
||||
client.queryDocuments(collection, query).toArray((err, results) => {
|
||||
if (err && err.code !== 409) {
|
||||
return e(err);
|
||||
}
|
||||
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
|
||||
});
|
||||
});
|
||||
}
|
||||
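// Print the "frozen" flag for the quality passed as the first CLI argument.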
getConfig(process.argv[2])
|
||||
.then(config => {
|
||||
console.log(config.frozen);
|
||||
process.exit(0);
|
||||
})
|
||||
.catch(err => {
|
||||
console.error(err);
|
||||
process.exit(1);
|
||||
});
|
||||
build/azure-pipelines/linux/product-build-linux.yml (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
inputs:
|
||||
azureSubscription: 'vscode-builds-subscription'
|
||||
KeyVaultName: vscode
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
export npm_config_arch="$(VSCODE_ARCH)"
|
||||
if [[ "$(VSCODE_ARCH)" == "ia32" ]]; then
|
||||
export PKG_CONFIG_PATH="/usr/lib/i386-linux-gnu/pkgconfig"
|
||||
fi
|
||||
|
||||
cat << EOF > ~/.netrc
|
||||
machine monacotools.visualstudio.com
|
||||
password $(devops-pat)
|
||||
machine github.com
|
||||
login vscode
|
||||
password $(github-distro-mixin-password)
|
||||
EOF
|
||||
|
||||
git config user.email "vscode@microsoft.com"
|
||||
git config user.name "VSCode"
|
||||
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
|
||||
git fetch distro
|
||||
git merge $(node -p "require('./package.json').distro")
|
||||
|
||||
CHILD_CONCURRENCY=1 yarn
|
||||
yarn gulp mixin
|
||||
yarn gulp hygiene
|
||||
yarn monaco-compile-check
|
||||
node build/azure-pipelines/common/installDistro.js
|
||||
node build/lib/builtInExtensions.js
|
||||
displayName: Prepare build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
./build/azure-pipelines/linux/build.sh
|
||||
displayName: Build
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
yarn gulp "electron-$(VSCODE_ARCH)"
|
||||
|
||||
# xvfb seems to be crashing often, let's make sure it's always up
|
||||
service xvfb start
|
||||
|
||||
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
|
||||
# yarn smoketest -- --build "$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)"
|
||||
displayName: Run unit tests
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
|
||||
VSCODE_HOCKEYAPP_TOKEN="$(vscode-hockeyapp-token)" \
|
||||
./build/azure-pipelines/linux/publish.sh
|
||||
displayName: Publish
|
||||
|
||||
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
|
||||
displayName: 'Component Detection'
|
||||
continueOnError: true
|
||||
|
||||
- task: PublishPipelineArtifact@0
|
||||
displayName: 'Publish Pipeline Artifact'
|
||||
inputs:
|
||||
artifactName: snap-$(VSCODE_ARCH)
|
||||
targetPath: .build/linux/snap-tarball
|
||||
build/azure-pipelines/linux/publish.sh (new executable file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
#!/usr/bin/env bash
|
||||
set -e
|
||||
REPO="$(pwd)"
|
||||
ROOT="$REPO/.."
|
||||
|
||||
# Publish tarball
|
||||
PLATFORM_LINUX="linux-$VSCODE_ARCH"
|
||||
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
||||
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
||||
BUILDNAME="VSCode-$PLATFORM_LINUX"
|
||||
BUILD="$ROOT/$BUILDNAME"
|
||||
BUILD_VERSION="$(date +%s)"
|
||||
[ -z "$VSCODE_QUALITY" ] && TARBALL_FILENAME="code-$BUILD_VERSION.tar.gz" || TARBALL_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.tar.gz"
|
||||
TARBALL_PATH="$ROOT/$TARBALL_FILENAME"
|
||||
PACKAGEJSON="$BUILD/resources/app/package.json"
|
||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||
|
||||
rm -rf $ROOT/code-*.tar.*
|
||||
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
|
||||
|
||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_LINUX" archive-unsigned "$TARBALL_FILENAME" "$VERSION" true "$TARBALL_PATH"
|
||||
|
||||
# Publish Remote Extension Host
|
||||
if [[ "$VSCODE_ARCH" != "ia32" ]]; then
|
||||
LEGACY_SERVER_BUILD_NAME="vscode-reh-$PLATFORM_LINUX"
|
||||
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX"
|
||||
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX.tar.gz"
|
||||
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
|
||||
|
||||
rm -rf $ROOT/vscode-server-*.tar.*
|
||||
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
|
||||
|
||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$VERSION" true "$SERVER_TARBALL_PATH"
|
||||
fi
|
||||
|
||||
# Publish hockeyapp symbols
|
||||
node build/azure-pipelines/common/symbols.js "$VSCODE_MIXIN_PASSWORD" "$VSCODE_HOCKEYAPP_TOKEN" "$VSCODE_ARCH" "$VSCODE_HOCKEYAPP_ID_LINUX64"
|
||||
|
||||
# Publish DEB
|
||||
yarn gulp "vscode-linux-$VSCODE_ARCH-build-deb"
|
||||
PLATFORM_DEB="linux-deb-$VSCODE_ARCH"
|
||||
[[ "$VSCODE_ARCH" == "ia32" ]] && DEB_ARCH="i386" || DEB_ARCH="amd64"
|
||||
DEB_FILENAME="$(ls $REPO/.build/linux/deb/$DEB_ARCH/deb/)"
|
||||
DEB_PATH="$REPO/.build/linux/deb/$DEB_ARCH/deb/$DEB_FILENAME"
|
||||
|
||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_DEB" package "$DEB_FILENAME" "$VERSION" true "$DEB_PATH"
|
||||
|
||||
# Publish RPM
|
||||
yarn gulp "vscode-linux-$VSCODE_ARCH-build-rpm"
|
||||
PLATFORM_RPM="linux-rpm-$VSCODE_ARCH"
|
||||
[[ "$VSCODE_ARCH" == "ia32" ]] && RPM_ARCH="i386" || RPM_ARCH="x86_64"
|
||||
RPM_FILENAME="$(ls $REPO/.build/linux/rpm/$RPM_ARCH/ | grep .rpm)"
|
||||
RPM_PATH="$REPO/.build/linux/rpm/$RPM_ARCH/$RPM_FILENAME"
|
||||
|
||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "$PLATFORM_RPM" package "$RPM_FILENAME" "$VERSION" true "$RPM_PATH"
|
||||
|
||||
# Publish Snap
|
||||
yarn gulp "vscode-linux-$VSCODE_ARCH-prepare-snap"
|
||||
|
||||
# Pack snap tarball artifact, in order to preserve file perms
|
||||
mkdir -p $REPO/.build/linux/snap-tarball
|
||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
|
||||
rm -rf $SNAP_TARBALL_PATH
|
||||
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
|
||||
build/azure-pipelines/linux/snap-build-linux.yml (new file, 55 lines)
@@ -0,0 +1,55 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
inputs:
|
||||
azureSubscription: 'vscode-builds-subscription'
|
||||
KeyVaultName: vscode
|
||||
|
||||
- task: DownloadPipelineArtifact@0
|
||||
displayName: 'Download Pipeline Artifact'
|
||||
inputs:
|
||||
artifactName: snap-$(VSCODE_ARCH)
|
||||
targetPath: .build/linux/snap-tarball
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
# Get snapcraft version
|
||||
snapcraft --version
|
||||
|
||||
# Make sure we get latest packages
|
||||
sudo apt-get update
|
||||
sudo apt-get upgrade -y
|
||||
|
||||
# Define variables
|
||||
REPO="$(pwd)"
|
||||
ARCH="$(VSCODE_ARCH)"
|
||||
SNAP_ROOT="$REPO/.build/linux/snap/$ARCH"
|
||||
|
||||
# Install build dependencies
|
||||
(cd build && yarn)
|
||||
|
||||
# Unpack snap tarball artifact, in order to preserve file perms
|
||||
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$ARCH.tar.gz"
|
||||
(cd .build/linux && tar -xzf $SNAP_TARBALL_PATH)
|
||||
|
||||
# Create snap package
|
||||
BUILD_VERSION="$(date +%s)"
|
||||
SNAP_FILENAME="code-$VSCODE_QUALITY-$BUILD_VERSION.snap"
|
||||
PACKAGEJSON="$(ls $SNAP_ROOT/code*/usr/share/code*/resources/app/package.json)"
|
||||
VERSION=$(node -p "require(\"$PACKAGEJSON\").version")
|
||||
SNAP_PATH="$SNAP_ROOT/$SNAP_FILENAME"
|
||||
(cd $SNAP_ROOT/code-* && sudo snapcraft snap --output "$SNAP_PATH")
|
||||
|
||||
# Publish snap package
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||
node build/azure-pipelines/common/publish.js "$VSCODE_QUALITY" "linux-snap-$ARCH" package "$SNAP_FILENAME" "$VERSION" true "$SNAP_PATH"
|
||||
build/azure-pipelines/product-build.yml (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
resources:
|
||||
containers:
|
||||
- container: vscode-x64
|
||||
endpoint: VSCodeHub
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
|
||||
- container: vscode-ia32
|
||||
endpoint: VSCodeHub
|
||||
image: vscodehub.azurecr.io/vscode-linux-build-agent:ia32
|
||||
- container: snapcraft
|
||||
image: snapcore/snapcraft
|
||||
|
||||
jobs:
|
||||
- job: Windows
|
||||
condition: eq(variables['VSCODE_BUILD_WIN32'], 'true')
|
||||
pool:
|
||||
vmImage: VS2017-Win2016
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- job: Windows32
|
||||
condition: eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true')
|
||||
pool:
|
||||
vmImage: VS2017-Win2016
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
steps:
|
||||
- template: win32/product-build-win32.yml
|
||||
|
||||
- job: Linux
|
||||
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
container: vscode-x64
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: LinuxSnap
|
||||
condition: eq(variables['VSCODE_BUILD_LINUX'], 'true')
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
variables:
|
||||
VSCODE_ARCH: x64
|
||||
container: snapcraft
|
||||
dependsOn: Linux
|
||||
steps:
|
||||
- template: linux/snap-build-linux.yml
|
||||
|
||||
- job: Linux32
|
||||
condition: eq(variables['VSCODE_BUILD_LINUX_32BIT'], 'true')
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
variables:
|
||||
VSCODE_ARCH: ia32
|
||||
container: vscode-ia32
|
||||
steps:
|
||||
- template: linux/product-build-linux.yml
|
||||
|
||||
- job: macOS
|
||||
condition: eq(variables['VSCODE_BUILD_MACOS'], 'true')
|
||||
pool:
|
||||
vmImage: macOS 10.13
|
||||
steps:
|
||||
- template: darwin/product-build-darwin.yml
|
||||
|
||||
- job: Mooncake
|
||||
pool:
|
||||
vmImage: 'Ubuntu-16.04'
|
||||
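# "condition: true" lets this job run even if some of the platform jobs above were skipped or failed.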
condition: true
|
||||
dependsOn:
|
||||
- Windows
|
||||
- Windows32
|
||||
- Linux
|
||||
- LinuxSnap
|
||||
- Linux32
|
||||
- macOS
|
||||
steps:
|
||||
- template: sync-mooncake.yml
|
||||
build/azure-pipelines/publish-types/.gitignore (new file, vendored, 2 lines)
@@ -0,0 +1,2 @@
node_modules/
*.js
build/azure-pipelines/publish-types/check-version.js (new file, 36 lines)
@@ -0,0 +1,36 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const cp = require("child_process");
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
if (!isValidTag(tag)) {
|
||||
throw Error(`Invalid tag ${tag}`);
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
function isValidTag(t) {
|
||||
if (t.split('.').length !== 3) {
|
||||
return false;
|
||||
}
|
||||
const [major, minor, bug] = t.split('.');
|
||||
// Only release for tags like 1.34.0
|
||||
if (bug !== '0') {
|
||||
return false;
|
||||
}
|
||||
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
build/azure-pipelines/publish-types/check-version.ts (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as cp from 'child_process';
|
||||
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
|
||||
if (!isValidTag(tag)) {
|
||||
throw Error(`Invalid tag ${tag}`);
|
||||
}
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
function isValidTag(t: string) {
|
||||
if (t.split('.').length !== 3) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const [major, minor, bug] = t.split('.');
|
||||
|
||||
// Only release for tags like 1.34.0
|
||||
if (bug !== '0') {
|
||||
return false;
|
||||
}
|
||||
|
||||
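// parseInt yields NaN for non-numeric components, and NaN never compares equal to anything, so isNaN() is used here.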
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
build/azure-pipelines/publish-types/publish-types.yml (new file, 67 lines)
@@ -0,0 +1,67 @@
|
||||
# Publish @types/vscode for each release
|
||||
|
||||
trigger:
|
||||
branches:
|
||||
include: ['refs/tags/*']
|
||||
|
||||
pr: none
|
||||
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
|
||||
- bash: |
|
||||
# Install build dependencies
|
||||
(cd build && yarn)
|
||||
node build/azure-pipelines/publish-types/check-version.js
|
||||
displayName: Check version
|
||||
|
||||
- bash: |
|
||||
git config --global user.email "vscode@microsoft.com"
|
||||
git config --global user.name "VSCode"
|
||||
|
||||
git clone https://$(GITHUB_TOKEN)@github.com/DefinitelyTyped/DefinitelyTyped.git --depth=1
|
||||
node build/azure-pipelines/publish-types/update-types.js
|
||||
|
||||
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
||||
|
||||
cd DefinitelyTyped
|
||||
|
||||
git diff --color | cat
|
||||
git add -A
|
||||
git status
|
||||
git checkout -b "vscode-types-$TAG_VERSION"
|
||||
git commit -m "VS Code $TAG_VERSION Extension API"
|
||||
git push origin "vscode-types-$TAG_VERSION"
|
||||
|
||||
displayName: Push update to DefinitelyTyped
|
||||
|
||||
- bash: |
|
||||
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
|
||||
CHANNEL="G1C14HJ2F"
|
||||
|
||||
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
|
||||
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
|
||||
MESSAGE2="[@octref, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."
|
||||
|
||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||
-H 'Content-type: application/json; charset=utf-8' \
|
||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE"'"}' \
|
||||
https://slack.com/api/chat.postMessage
|
||||
|
||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||
-H 'Content-type: application/json; charset=utf-8' \
|
||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$LINK"'"}' \
|
||||
https://slack.com/api/chat.postMessage
|
||||
|
||||
curl -X POST -H "Authorization: Bearer $(SLACK_TOKEN)" \
|
||||
-H 'Content-type: application/json; charset=utf-8' \
|
||||
--data '{"channel":"'"$CHANNEL"'", "link_names": true, "text":"'"$MESSAGE2"'"}' \
|
||||
https://slack.com/api/chat.postMessage
|
||||
|
||||
displayName: Send message on Slack
|
||||
build/azure-pipelines/publish-types/update-types.js (new file, 62 lines)
@@ -0,0 +1,62 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const fs = require("fs");
|
||||
const cp = require("child_process");
|
||||
const path = require("path");
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
||||
updateDTSFile(outPath, tag);
|
||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
||||
}
|
||||
catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
function updateDTSFile(outPath, tag) {
|
||||
const oldContent = fs.readFileSync(outPath, 'utf-8');
|
||||
const newContent = getNewFileContent(oldContent, tag);
|
||||
fs.writeFileSync(outPath, newContent);
|
||||
}
|
||||
function getNewFileContent(content, tag) {
|
||||
const oldheader = [
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`
|
||||
].join('\n');
|
||||
return getNewFileHeader(tag) + content.slice(oldheader.length);
|
||||
}
|
||||
function getNewFileHeader(tag) {
|
||||
const [major, minor] = tag.split('.');
|
||||
const shorttag = `${major}.${minor}`;
|
||||
const header = [
|
||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
||||
`// Project: https://github.com/microsoft/vscode`,
|
||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
||||
``,
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the Source EULA.`,
|
||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`,
|
||||
``,
|
||||
`/**`,
|
||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
||||
` * See https://code.visualstudio.com/api for more information`,
|
||||
` */`
|
||||
].join('\n');
|
||||
return header;
|
||||
}
|
||||
build/azure-pipelines/publish-types/update-types.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as fs from 'fs';
|
||||
import * as cp from 'child_process';
|
||||
import * as path from 'path';
|
||||
|
||||
let tag = '';
|
||||
try {
|
||||
tag = cp
|
||||
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
|
||||
.toString()
|
||||
.trim();
|
||||
|
||||
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
|
||||
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
|
||||
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
|
||||
|
||||
updateDTSFile(outPath, tag);
|
||||
|
||||
console.log(`Done updating vscode.d.ts at ${outPath}`);
|
||||
} catch (err) {
|
||||
console.error(err);
|
||||
console.error('Failed to update types');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
function updateDTSFile(outPath: string, tag: string) {
|
||||
const oldContent = fs.readFileSync(outPath, 'utf-8');
|
||||
const newContent = getNewFileContent(oldContent, tag);
|
||||
|
||||
fs.writeFileSync(outPath, newContent);
|
||||
}
|
||||
|
||||
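// Strip the repository's license header from the downloaded d.ts and prepend the DefinitelyTyped-style header built by getNewFileHeader.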
function getNewFileContent(content: string, tag: string) {
|
||||
const oldheader = [
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`
|
||||
].join('\n');
|
||||
|
||||
return getNewFileHeader(tag) + content.slice(oldheader.length);
|
||||
}
|
||||
|
||||
function getNewFileHeader(tag: string) {
|
||||
const [major, minor] = tag.split('.');
|
||||
const shorttag = `${major}.${minor}`;
|
||||
|
||||
const header = [
|
||||
`// Type definitions for Visual Studio Code ${shorttag}`,
|
||||
`// Project: https://github.com/microsoft/vscode`,
|
||||
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
|
||||
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
|
||||
``,
|
||||
`/*---------------------------------------------------------------------------------------------`,
|
||||
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
|
||||
` * Licensed under the Source EULA.`,
|
||||
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
|
||||
` *--------------------------------------------------------------------------------------------*/`,
|
||||
``,
|
||||
`/**`,
|
||||
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
|
||||
` * See https://code.visualstudio.com/api for more information`,
|
||||
` */`
|
||||
].join('\n');
|
||||
|
||||
return header;
|
||||
}
|
||||
build/azure-pipelines/sync-mooncake.yml (new file, 24 lines)
@@ -0,0 +1,24 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
|
||||
- task: AzureKeyVault@1
|
||||
displayName: 'Azure Key Vault: Get Secrets'
|
||||
inputs:
|
||||
azureSubscription: 'vscode-builds-subscription'
|
||||
KeyVaultName: vscode
|
||||
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
(cd build ; yarn)
|
||||
|
||||
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
|
||||
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
|
||||
MOONCAKE_STORAGE_ACCESS_KEY="$(vscode-mooncake-storage-key)" \
|
||||
node build/azure-pipelines/common/sync-mooncake.js "$VSCODE_QUALITY"
|
||||
build/azure-pipelines/win32/build.ps1 (new file, 5 lines)
@@ -0,0 +1,5 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-min" }
exec { yarn gulp "vscode-reh-win32-$env:VSCODE_ARCH-min" }
exec { yarn gulp "vscode-win32-$env:VSCODE_ARCH-inno-updater" }
build/azure-pipelines/win32/continuous-build-win32.yml (new file, 54 lines)
@@ -0,0 +1,54 @@
|
||||
steps:
|
||||
- task: NodeTool@0
|
||||
inputs:
|
||||
versionSpec: "10.15.1"
|
||||
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
|
||||
inputs:
|
||||
versionSpec: "1.10.1"
|
||||
- task: UsePythonVersion@0
|
||||
inputs:
|
||||
versionSpec: '2.x'
|
||||
addToPath: true
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
# inputs:
|
||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: '$(ArtifactFeed)'
|
||||
# condition: eq(variables['System.PullRequest.PullRequestId'], '')
|
||||
- powershell: |
|
||||
yarn
|
||||
displayName: Install Dependencies
|
||||
# condition: or(ne(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
||||
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
# inputs:
|
||||
# keyfile: '**/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
|
||||
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
|
||||
# vstsFeed: '$(ArtifactFeed)'
|
||||
# condition: and(succeeded(), eq(variables['System.PullRequest.PullRequestId'], ''), ne(variables['CacheRestored'], 'true'))
|
||||
- powershell: |
|
||||
yarn gulp electron
|
||||
displayName: Download Electron
|
||||
- powershell: |
|
||||
yarn gulp hygiene
|
||||
displayName: Run Hygiene Checks
|
||||
- powershell: |
|
||||
yarn monaco-compile-check
|
||||
displayName: Run Monaco Editor Checks
|
||||
- powershell: |
|
||||
yarn compile
|
||||
displayName: Compile Sources
|
||||
- powershell: |
|
||||
yarn download-builtin-extensions
|
||||
displayName: Download Built-in Extensions
|
||||
- powershell: |
|
||||
.\scripts\test.bat --tfs "Unit Tests"
|
||||
displayName: Run Unit Tests
|
||||
- powershell: |
|
||||
.\scripts\test-integration.bat --tfs "Integration Tests"
|
||||
displayName: Run Integration Tests
|
||||
- task: PublishTestResults@2
|
||||
displayName: Publish Tests Results
|
||||
inputs:
|
||||
testResultsFiles: '*-results.xml'
|
||||
searchFolder: '$(Build.ArtifactStagingDirectory)/test-results'
|
||||
condition: succeededOrFailed()
|
||||
build/azure-pipelines/win32/product-build-win32.yml (new file, 149 lines)
@@ -0,0 +1,149 @@
|
||||
steps:
- task: NodeTool@0
  inputs:
    versionSpec: "10.15.1"

- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
  inputs:
    versionSpec: "1.10.1"

- task: UsePythonVersion@0
  inputs:
    versionSpec: '2.x'
    addToPath: true

- task: AzureKeyVault@1
  displayName: 'Azure Key Vault: Get Secrets'
  inputs:
    azureSubscription: 'vscode-builds-subscription'
    KeyVaultName: vscode

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    "machine monacotools.visualstudio.com`npassword $(devops-pat)`nmachine github.com`nlogin vscode`npassword $(github-distro-mixin-password)" | Out-File "$env:USERPROFILE\_netrc" -Encoding ASCII
    $env:npm_config_arch="$(VSCODE_ARCH)"
    $env:CHILD_CONCURRENCY="1"

    exec { git config user.email "vscode@microsoft.com" }
    exec { git config user.name "VSCode" }
    exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
    exec { git fetch distro }
    exec { git merge $(node -p "require('./package.json').distro") }

    exec { yarn }
    exec { yarn gulp mixin }
    exec { yarn gulp hygiene }
    exec { yarn monaco-compile-check }
    exec { node build/azure-pipelines/common/installDistro.js }
    exec { node build/lib/builtInExtensions.js }
  displayName: Prepare build

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
    .\build\azure-pipelines\win32\build.ps1
  displayName: Build

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn gulp "electron-$(VSCODE_ARCH)" }
    exec { .\scripts\test.bat --build --tfs "Unit Tests" }
    # yarn smoketest -- --build "$(agent.builddirectory)\VSCode-win32-$(VSCODE_ARCH)"
  displayName: Run unit tests

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    exec { yarn gulp "electron-$(VSCODE_ARCH)" }
    exec { .\scripts\test-integration.bat --build --tfs "Integration Tests" }
  displayName: Run integration tests

- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
  inputs:
    ConnectedServiceName: 'ESRP CodeSign'
    FolderPath: '$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH),$(agent.builddirectory)/vscode-reh-win32-$(VSCODE_ARCH)'
    Pattern: '*.dll,*.exe,*.node'
    signConfigType: inlineSignParams
    inlineOperation: |
      [
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolSign",
          "parameters": [
            {
              "parameterName": "OpusName",
              "parameterValue": "VS Code"
            },
            {
              "parameterName": "OpusInfo",
              "parameterValue": "https://code.visualstudio.com/"
            },
            {
              "parameterName": "Append",
              "parameterValue": "/as"
            },
            {
              "parameterName": "FileDigest",
              "parameterValue": "/fd \"SHA256\""
            },
            {
              "parameterName": "PageHash",
              "parameterValue": "/NPH"
            },
            {
              "parameterName": "TimeStamp",
              "parameterValue": "/tr \"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer\" /td sha256"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        },
        {
          "keyCode": "CP-230012",
          "operationSetCode": "SigntoolVerify",
          "parameters": [
            {
              "parameterName": "VerifyAll",
              "parameterValue": "/all"
            }
          ],
          "toolName": "sign",
          "toolVersion": "1.0"
        }
      ]
    SessionTimeout: 120

- task: NuGetCommand@2
  displayName: Install ESRPClient.exe
  inputs:
    restoreSolution: 'build\azure-pipelines\win32\ESRPClient\packages.config'
    feedsToUse: config
    nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
    externalFeedCredentials: 3fc0b7f7-da09-4ae7-a9c8-d69824b1819b
    restoreDirectory: packages

- task: ESRPImportCertTask@1
  displayName: Import ESRP Request Signing Certificate
  inputs:
    ESRP: 'ESRP CodeSign'

- powershell: |
    $ErrorActionPreference = "Stop"
    .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1 -AuthCertificateBase64 $(esrp-auth-certificate) -AuthCertificateKey $(esrp-auth-certificate-key)
  displayName: Import ESRP Auth Certificate

- powershell: |
    . build/azure-pipelines/win32/exec.ps1
    $ErrorActionPreference = "Stop"
    $env:AZURE_STORAGE_ACCESS_KEY_2 = "$(vscode-storage-key)"
    $env:AZURE_DOCUMENTDB_MASTERKEY = "$(builds-docdb-key-readwrite)"
    $env:VSCODE_HOCKEYAPP_TOKEN = "$(vscode-hockeyapp-token)"
    .\build\azure-pipelines\win32\publish.ps1
  displayName: Publish

- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
  displayName: 'Component Detection'
  continueOnError: true
37 build/azure-pipelines/win32/publish.ps1 Normal file
@@ -0,0 +1,37 @@
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"

$Arch = "$env:VSCODE_ARCH"

exec { yarn gulp "vscode-win32-$Arch-archive" "vscode-win32-$Arch-system-setup" "vscode-win32-$Arch-user-setup" --sign }

$Repo = "$(pwd)"
$Root = "$Repo\.."
$SystemExe = "$Repo\.build\win32-$Arch\system-setup\VSCodeSetup.exe"
$UserExe = "$Repo\.build\win32-$Arch\user-setup\VSCodeSetup.exe"
$Zip = "$Repo\.build\win32-$Arch\archive\VSCode-win32-$Arch.zip"
$LegacyServer = "$Root\vscode-reh-win32-$Arch"
$ServerName = "vscode-server-win32-$Arch"
$Server = "$Root\$ServerName"
$ServerZip = "$Repo\.build\vscode-server-win32-$Arch.zip"
$Build = "$Root\VSCode-win32-$Arch"

# Create server archive
exec { Rename-Item -Path $LegacyServer -NewName $ServerName }
exec { .\node_modules\7zip\7zip-lite\7z.exe a -tzip $ServerZip $Server -r }

# get version
$PackageJson = Get-Content -Raw -Path "$Build\resources\app\package.json" | ConvertFrom-Json
$Version = $PackageJson.version
$Quality = "$env:VSCODE_QUALITY"

$AssetPlatform = if ("$Arch" -eq "ia32") { "win32" } else { "win32-x64" }

exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-archive" archive "VSCode-win32-$Arch-$Version.zip" $Version true $Zip }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform" setup "VSCodeSetup-$Arch-$Version.exe" $Version true $SystemExe }
exec { node build/azure-pipelines/common/publish.js $Quality "$AssetPlatform-user" setup "VSCodeUserSetup-$Arch-$Version.exe" $Version true $UserExe }
exec { node build/azure-pipelines/common/publish.js $Quality "server-$AssetPlatform" archive "vscode-server-win32-$Arch.zip" $Version true $ServerZip }

# publish hockeyapp symbols
$hockeyAppId = if ("$Arch" -eq "ia32") { "$env:VSCODE_HOCKEYAPP_ID_WIN32" } else { "$env:VSCODE_HOCKEYAPP_ID_WIN64" }
exec { node build/azure-pipelines/common/symbols.js "$env:VSCODE_MIXIN_PASSWORD" "$env:VSCODE_HOCKEYAPP_TOKEN" "$Arch" $hockeyAppId }
70 build/azure-pipelines/win32/sign.ps1 Normal file
@@ -0,0 +1,70 @@
function Create-TmpJson($Obj) {
    $FileName = [System.IO.Path]::GetTempFileName()
    ConvertTo-Json -Depth 100 $Obj | Out-File -Encoding UTF8 $FileName
    return $FileName
}

$Auth = Create-TmpJson @{
    Version = "1.0.0"
    AuthenticationType = "AAD_CERT"
    ClientId = $env:ESRPClientId
    AuthCert = @{
        SubjectName = $env:ESRPAuthCertificateSubjectName
        StoreLocation = "LocalMachine"
        StoreName = "My"
    }
    RequestSigningCert = @{
        SubjectName = $env:ESRPCertificateSubjectName
        StoreLocation = "LocalMachine"
        StoreName = "My"
    }
}

$Policy = Create-TmpJson @{
    Version = "1.0.0"
}

$Input = Create-TmpJson @{
    Version = "1.0.0"
    SignBatches = @(
        @{
            SourceLocationType = "UNC"
            SignRequestFiles = @(
                @{
                    SourceLocation = $args[0]
                }
            )
            SigningInfo = @{
                Operations = @(
                    @{
                        KeyCode = "CP-230012"
                        OperationCode = "SigntoolSign"
                        Parameters = @{
                            OpusName = "VS Code"
                            OpusInfo = "https://code.visualstudio.com/"
                            Append = "/as"
                            FileDigest = "/fd `"SHA256`""
                            PageHash = "/NPH"
                            TimeStamp = "/tr `"http://rfc3161.gtm.corp.microsoft.com/TSS/HttpTspServer`" /td sha256"
                        }
                        ToolName = "sign"
                        ToolVersion = "1.0"
                    },
                    @{
                        KeyCode = "CP-230012"
                        OperationCode = "SigntoolVerify"
                        Parameters = @{
                            VerifyAll = "/all"
                        }
                        ToolName = "sign"
                        ToolVersion = "1.0"
                    }
                )
            }
        }
    )
}

$Output = [System.IO.Path]::GetTempFileName()
$ScriptPath = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent
& "$ScriptPath\ESRPClient\packages\EsrpClient.1.0.27\tools\ESRPClient.exe" Sign -a $Auth -p $Policy -i $Input -o $Output
@@ -1,12 +1,2 @@
[
  {
    "name": "ms-vscode.node-debug",
    "version": "1.26.7",
    "repo": "https://github.com/Microsoft/vscode-node-debug"
  },
  {
    "name": "ms-vscode.node-debug2",
    "version": "1.26.8",
    "repo": "https://github.com/Microsoft/vscode-node-debug2"
  }
]

@@ -43,7 +43,7 @@ function asYarnDependency(prefix, tree) {
}

function getYarnProductionDependencies(cwd) {
    const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'ignore'] });
    const raw = cp.execSync('yarn list --json', { cwd, encoding: 'utf8', env: { ...process.env, NODE_ENV: 'production' }, stdio: [null, null, 'inherit'] });
    const match = /^{"type":"tree".*$/m.exec(raw);

    if (!match || match.length !== 1) {
91 build/download/download.js Normal file
@@ -0,0 +1,91 @@
"use strict";
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
const https = require("https");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const cp = require("child_process");
|
||||
function ensureDir(filepath) {
|
||||
if (!fs.existsSync(filepath)) {
|
||||
ensureDir(path.dirname(filepath));
|
||||
fs.mkdirSync(filepath);
|
||||
}
|
||||
}
|
||||
function download(options, destination) {
|
||||
ensureDir(path.dirname(destination));
|
||||
return new Promise((c, e) => {
|
||||
const fd = fs.openSync(destination, 'w');
|
||||
const req = https.get(options, (res) => {
|
||||
res.on('data', (chunk) => {
|
||||
fs.writeSync(fd, chunk);
|
||||
});
|
||||
res.on('end', () => {
|
||||
fs.closeSync(fd);
|
||||
c();
|
||||
});
|
||||
});
|
||||
req.on('error', (reqErr) => {
|
||||
console.error(`request to ${options.host}${options.path} failed.`);
|
||||
console.error(reqErr);
|
||||
e(reqErr);
|
||||
});
|
||||
});
|
||||
}
|
||||
const MARKER_ARGUMENT = `_download_fork_`;
|
||||
function base64encode(str) {
|
||||
return Buffer.from(str, 'utf8').toString('base64');
|
||||
}
|
||||
function base64decode(str) {
|
||||
return Buffer.from(str, 'base64').toString('utf8');
|
||||
}
|
||||
function downloadInExternalProcess(options) {
|
||||
const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
|
||||
console.log(`Downloading ${url}...`);
|
||||
return new Promise((c, e) => {
|
||||
const child = cp.fork(__filename, [MARKER_ARGUMENT, base64encode(JSON.stringify(options))], {
|
||||
stdio: ['pipe', 'pipe', 'pipe', 'ipc']
|
||||
});
|
||||
let stderr = [];
|
||||
child.stderr.on('data', (chunk) => {
|
||||
stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
|
||||
});
|
||||
child.on('exit', (code) => {
|
||||
if (code === 0) {
|
||||
// normal termination
|
||||
console.log(`Finished downloading ${url}.`);
|
||||
c();
|
||||
}
|
||||
else {
|
||||
// abnormal termination
|
||||
console.error(Buffer.concat(stderr).toString());
|
||||
e(new Error(`Download of ${url} failed.`));
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.downloadInExternalProcess = downloadInExternalProcess;
|
||||
function _downloadInExternalProcess() {
|
||||
let options;
|
||||
try {
|
||||
options = JSON.parse(base64decode(process.argv[3]));
|
||||
}
|
||||
catch (err) {
|
||||
console.error(`Cannot read arguments`);
|
||||
console.error(err);
|
||||
process.exit(-1);
|
||||
return;
|
||||
}
|
||||
download(options.requestOptions, options.destinationPath).then(() => {
|
||||
process.exit(0);
|
||||
}, (err) => {
|
||||
console.error(err);
|
||||
process.exit(-2);
|
||||
});
|
||||
}
|
||||
if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
|
||||
// running as forked download script
|
||||
_downloadInExternalProcess();
|
||||
}
|
||||
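For orientation only, the sketch below shows how a build script could call the helper added above; it is an assumption for illustration (the host, path, destination, and require path are placeholders and not part of this change), assuming the script runs from the repository root.

// Hypothetical usage sketch of build/download/download.js (not part of this diff).
const { downloadInExternalProcess } = require('./build/download/download');

downloadInExternalProcess({
    requestOptions: {
        host: 'example.com',           // placeholder host
        path: '/some/asset.zip'        // placeholder path
    },
    destinationPath: '.build/asset.zip' // placeholder destination on disk
}).then(() => {
    console.log('download finished');   // the helper resolves when the forked child exits with code 0
}, (err) => {
    console.error(err);                  // rejection carries the child's stderr wrapped in an Error
    process.exit(1);
});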
111 build/download/download.ts Normal file
@@ -0,0 +1,111 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as https from 'https';
import * as fs from 'fs';
import * as path from 'path';
import * as cp from 'child_process';

function ensureDir(filepath: string) {
    if (!fs.existsSync(filepath)) {
        ensureDir(path.dirname(filepath));
        fs.mkdirSync(filepath);
    }
}

function download(options: https.RequestOptions, destination: string): Promise<void> {
    ensureDir(path.dirname(destination));

    return new Promise<void>((c, e) => {
        const fd = fs.openSync(destination, 'w');
        const req = https.get(options, (res) => {
            res.on('data', (chunk) => {
                fs.writeSync(fd, chunk);
            });
            res.on('end', () => {
                fs.closeSync(fd);
                c();
            });
        });
        req.on('error', (reqErr) => {
            console.error(`request to ${options.host}${options.path} failed.`);
            console.error(reqErr);
            e(reqErr);
        });
    });
}

const MARKER_ARGUMENT = `_download_fork_`;

function base64encode(str: string): string {
    return Buffer.from(str, 'utf8').toString('base64');
}

function base64decode(str: string): string {
    return Buffer.from(str, 'base64').toString('utf8');
}

export interface IDownloadRequestOptions {
    host: string;
    path: string;
}

export interface IDownloadOptions {
    requestOptions: IDownloadRequestOptions;
    destinationPath: string;
}

export function downloadInExternalProcess(options: IDownloadOptions): Promise<void> {
    const url = `https://${options.requestOptions.host}${options.requestOptions.path}`;
    console.log(`Downloading ${url}...`);
    return new Promise<void>((c, e) => {
        const child = cp.fork(
            __filename,
            [MARKER_ARGUMENT, base64encode(JSON.stringify(options))],
            {
                stdio: ['pipe', 'pipe', 'pipe', 'ipc']
            }
        );
        let stderr: Buffer[] = [];
        child.stderr.on('data', (chunk) => {
            stderr.push(typeof chunk === 'string' ? Buffer.from(chunk) : chunk);
        });
        child.on('exit', (code) => {
            if (code === 0) {
                // normal termination
                console.log(`Finished downloading ${url}.`);
                c();
            } else {
                // abnormal termination
                console.error(Buffer.concat(stderr).toString());
                e(new Error(`Download of ${url} failed.`));
            }
        });
    });
}

function _downloadInExternalProcess() {
    let options: IDownloadOptions;
    try {
        options = JSON.parse(base64decode(process.argv[3]));
    } catch (err) {
        console.error(`Cannot read arguments`);
        console.error(err);
        process.exit(-1);
        return;
    }

    download(options.requestOptions, options.destinationPath).then(() => {
        process.exit(0);
    }, (err) => {
        console.error(err);
        process.exit(-2);
    });
}

if (process.argv.length >= 4 && process.argv[2] === MARKER_ARGUMENT) {
    // running as forked download script
    _downloadInExternalProcess();
}
18 build/gulpfile.compile.js Normal file
@@ -0,0 +1,18 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

const util = require('./lib/util');
const task = require('./lib/task');
const compilation = require('./lib/compilation');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');

// Full compile, including nls and inline sources in sourcemaps, for build
const compileClientBuildTask = task.define('compile-client-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));

// All Build
const compileBuildTask = task.define('compile-build', task.parallel(compileClientBuildTask, compileExtensionsBuildTask));
exports.compileBuildTask = compileBuildTask;
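As a point of reference, a consuming gulpfile can register the exported task much as other task.define results are registered later in this diff (for example compile-extensions); the exact require path below is an assumption for illustration, since the real wiring lives outside this file.

// Hypothetical wiring sketch (assumption, not shown in this hunk): gulp 4 accepts a
// named task function, so the exported compileBuildTask can be registered directly
// and then run as `yarn gulp compile-build`.
const gulp = require('gulp');
const { compileBuildTask } = require('./build/gulpfile.compile');

gulp.task(compileBuildTask);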
@@ -6,6 +6,7 @@
|
||||
const gulp = require('gulp');
|
||||
const path = require('path');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const common = require('./lib/optimize');
|
||||
const es = require('event-stream');
|
||||
const File = require('vinyl');
|
||||
@@ -28,7 +29,7 @@ var editorEntryPoints = [
|
||||
name: 'vs/editor/editor.main',
|
||||
include: [],
|
||||
exclude: ['vs/css', 'vs/nls'],
|
||||
prepend: ['out-build/vs/css.js', 'out-build/vs/nls.js'],
|
||||
prepend: ['out-editor-build/vs/css.js', 'out-editor-build/vs/nls.js'],
|
||||
},
|
||||
{
|
||||
name: 'vs/base/common/worker/simpleWorker',
|
||||
@@ -48,9 +49,6 @@ var editorResources = [
|
||||
'!**/test/**'
|
||||
];
|
||||
|
||||
var editorOtherSources = [
|
||||
];
|
||||
|
||||
var BUNDLED_FILE_HEADER = [
|
||||
'/*!-----------------------------------------------------------',
|
||||
' * Copyright (c) Microsoft Corporation. All rights reserved.',
|
||||
@@ -63,8 +61,7 @@ var BUNDLED_FILE_HEADER = [
|
||||
|
||||
const languages = i18n.defaultLanguages.concat([]); // i18n.defaultLanguages.concat(process.env.VSCODE_QUALITY !== 'stable' ? i18n.extraLanguages : []);
|
||||
|
||||
gulp.task('clean-editor-src', util.rimraf('out-editor-src'));
|
||||
gulp.task('extract-editor-src', ['clean-editor-src'], function () {
|
||||
const extractEditorSrcTask = task.define('extract-editor-src', () => {
|
||||
console.log(`If the build fails, consider tweaking shakeLevel below to a lower value.`);
|
||||
const apiusages = monacoapi.execute().usageContent;
|
||||
const extrausages = fs.readFileSync(path.join(root, 'build', 'monaco', 'monaco.usage.recipe')).toString();
|
||||
@@ -79,35 +76,39 @@ gulp.task('extract-editor-src', ['clean-editor-src'], function () {
|
||||
apiusages,
|
||||
extrausages
|
||||
],
|
||||
typings: [
|
||||
'typings/lib.ie11_safe_es6.d.ts',
|
||||
'typings/thenable.d.ts',
|
||||
'typings/es6-promise.d.ts',
|
||||
'typings/require-monaco.d.ts',
|
||||
"typings/lib.es2018.promise.d.ts",
|
||||
'vs/monaco.d.ts'
|
||||
],
|
||||
libs: [
|
||||
`lib.d.ts`,
|
||||
`lib.es2015.collection.d.ts`
|
||||
`lib.es5.d.ts`,
|
||||
`lib.dom.d.ts`,
|
||||
`lib.webworker.importscripts.d.ts`
|
||||
],
|
||||
redirects: {
|
||||
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
||||
},
|
||||
compilerOptions: {
|
||||
module: 2, // ModuleKind.AMD
|
||||
},
|
||||
shakeLevel: 2, // 0-Files, 1-InnerFile, 2-ClassMembers
|
||||
importIgnorePattern: /^vs\/css!/,
|
||||
importIgnorePattern: /(^vs\/css!)|(promise-polyfill\/polyfill)/,
|
||||
destRoot: path.join(root, 'out-editor-src')
|
||||
});
|
||||
});
|
||||
|
||||
// Full compile, including nls and inline sources in sourcemaps, for build
|
||||
gulp.task('clean-editor-build', util.rimraf('out-editor-build'));
|
||||
gulp.task('compile-editor-build', ['clean-editor-build', 'extract-editor-src'], compilation.compileTask('out-editor-src', 'out-editor-build', true));
|
||||
const compileEditorAMDTask = task.define('compile-editor-amd', compilation.compileTask('out-editor-src', 'out-editor-build', true));
|
||||
|
||||
gulp.task('clean-optimized-editor', util.rimraf('out-editor'));
|
||||
gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'], common.optimizeTask({
|
||||
const optimizeEditorAMDTask = task.define('optimize-editor-amd', common.optimizeTask({
|
||||
src: 'out-editor-build',
|
||||
entryPoints: editorEntryPoints,
|
||||
otherSources: editorOtherSources,
|
||||
resources: editorResources,
|
||||
loaderConfig: {
|
||||
paths: {
|
||||
'vs': 'out-editor-build/vs',
|
||||
'vs/css': 'out-editor-build/vs/css.build',
|
||||
'vs/nls': 'out-editor-build/vs/nls.build',
|
||||
'vscode': 'empty:'
|
||||
}
|
||||
},
|
||||
@@ -118,29 +119,45 @@ gulp.task('optimize-editor', ['clean-optimized-editor', 'compile-editor-build'],
|
||||
languages: languages
|
||||
}));
|
||||
|
||||
gulp.task('clean-minified-editor', util.rimraf('out-editor-min'));
|
||||
gulp.task('minify-editor', ['clean-minified-editor', 'optimize-editor'], common.minifyTask('out-editor'));
|
||||
const minifyEditorAMDTask = task.define('minify-editor-amd', common.minifyTask('out-editor'));
|
||||
|
||||
gulp.task('clean-editor-esm', util.rimraf('out-editor-esm'));
|
||||
gulp.task('extract-editor-esm', ['clean-editor-esm', 'clean-editor-distro'], function () {
|
||||
standalone.createESMSourcesAndResources({
|
||||
entryPoints: [
|
||||
'vs/editor/editor.main',
|
||||
'vs/editor/editor.worker'
|
||||
],
|
||||
outFolder: './out-editor-esm/src',
|
||||
const createESMSourcesAndResourcesTask = task.define('extract-editor-esm', () => {
|
||||
standalone.createESMSourcesAndResources2({
|
||||
srcFolder: './out-editor-src',
|
||||
outFolder: './out-editor-esm',
|
||||
outResourcesFolder: './out-monaco-editor-core/esm',
|
||||
redirects: {
|
||||
'vs/base/browser/ui/octiconLabel/octiconLabel': 'vs/base/browser/ui/octiconLabel/octiconLabel.mock',
|
||||
'vs/nls': 'vs/nls.mock',
|
||||
ignores: [
|
||||
'inlineEntryPoint:0.ts',
|
||||
'inlineEntryPoint:1.ts',
|
||||
'vs/loader.js',
|
||||
'vs/nls.ts',
|
||||
'vs/nls.build.js',
|
||||
'vs/nls.d.ts',
|
||||
'vs/css.js',
|
||||
'vs/css.build.js',
|
||||
'vs/css.d.ts',
|
||||
'vs/base/worker/workerMain.ts',
|
||||
],
|
||||
renames: {
|
||||
'vs/nls.mock.ts': 'vs/nls.ts'
|
||||
}
|
||||
});
|
||||
});
|
||||
gulp.task('compile-editor-esm', ['extract-editor-esm', 'clean-editor-distro'], function () {
|
||||
|
||||
const compileEditorESMTask = task.define('compile-editor-esm', () => {
|
||||
if (process.platform === 'win32') {
|
||||
const result = cp.spawnSync(`..\\node_modules\\.bin\\tsc.cmd`, {
|
||||
cwd: path.join(__dirname, '../out-editor-esm')
|
||||
});
|
||||
console.log(result.stdout.toString());
|
||||
console.log(result.stderr.toString());
|
||||
} else {
|
||||
const result = cp.spawnSync(`node`, [`../node_modules/.bin/tsc`], {
|
||||
cwd: path.join(__dirname, '../out-editor-esm')
|
||||
});
|
||||
console.log(result.stdout.toString());
|
||||
console.log(result.stderr.toString());
|
||||
}
|
||||
});
|
||||
|
||||
function toExternalDTS(contents) {
|
||||
@@ -178,8 +195,16 @@ function toExternalDTS(contents) {
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
gulp.task('clean-editor-distro', util.rimraf('out-monaco-editor-core'));
|
||||
gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify-editor', 'optimize-editor'], function () {
|
||||
function filterStream(testFunc) {
|
||||
return es.through(function (data) {
|
||||
if (!testFunc(data.relative)) {
|
||||
return;
|
||||
}
|
||||
this.emit('data', data);
|
||||
});
|
||||
}
|
||||
|
||||
const finalEditorResourcesTask = task.define('final-editor-resources', () => {
|
||||
return es.merge(
|
||||
// other assets
|
||||
es.merge(
|
||||
@@ -194,7 +219,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
|
||||
this.emit('data', new File({
|
||||
path: data.path.replace(/monaco\.d\.ts/, 'editor.api.d.ts'),
|
||||
base: data.base,
|
||||
contents: new Buffer(toExternalDTS(data.contents.toString()))
|
||||
contents: Buffer.from(toExternalDTS(data.contents.toString()))
|
||||
}));
|
||||
}))
|
||||
.pipe(gulp.dest('out-monaco-editor-core/esm/vs/editor')),
|
||||
@@ -209,6 +234,14 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
|
||||
}))
|
||||
.pipe(gulp.dest('out-monaco-editor-core')),
|
||||
|
||||
// version.txt
|
||||
gulp.src('build/monaco/version.txt')
|
||||
.pipe(es.through(function (data) {
|
||||
data.contents = Buffer.from(`monaco-editor-core: https://github.com/Microsoft/vscode/tree/${sha1}`);
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(gulp.dest('out-monaco-editor-core')),
|
||||
|
||||
// README.md
|
||||
gulp.src('build/monaco/README-npm.md')
|
||||
.pipe(es.through(function (data) {
|
||||
@@ -242,7 +275,7 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
|
||||
|
||||
var strContents = data.contents.toString();
|
||||
var newStr = '//# sourceMappingURL=' + relativePathToMap.replace(/\\/g, '/');
|
||||
strContents = strContents.replace(/\/\/\# sourceMappingURL=[^ ]+$/, newStr);
|
||||
strContents = strContents.replace(/\/\/# sourceMappingURL=[^ ]+$/, newStr);
|
||||
|
||||
data.contents = Buffer.from(strContents);
|
||||
this.emit('data', data);
|
||||
@@ -258,59 +291,31 @@ gulp.task('editor-distro', ['clean-editor-distro', 'compile-editor-esm', 'minify
|
||||
);
|
||||
});
|
||||
|
||||
gulp.task('analyze-editor-distro', function () {
|
||||
// @ts-ignore
|
||||
var bundleInfo = require('../out-editor/bundleInfo.json');
|
||||
var graph = bundleInfo.graph;
|
||||
var bundles = bundleInfo.bundles;
|
||||
|
||||
var inverseGraph = {};
|
||||
Object.keys(graph).forEach(function (module) {
|
||||
var dependencies = graph[module];
|
||||
dependencies.forEach(function (dep) {
|
||||
inverseGraph[dep] = inverseGraph[dep] || [];
|
||||
inverseGraph[dep].push(module);
|
||||
});
|
||||
});
|
||||
|
||||
var detailed = {};
|
||||
Object.keys(bundles).forEach(function (entryPoint) {
|
||||
var included = bundles[entryPoint];
|
||||
var includedMap = {};
|
||||
included.forEach(function (included) {
|
||||
includedMap[included] = true;
|
||||
});
|
||||
|
||||
var explanation = [];
|
||||
included.map(function (included) {
|
||||
if (included.indexOf('!') >= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
var reason = (inverseGraph[included] || []).filter(function (mod) {
|
||||
return !!includedMap[mod];
|
||||
});
|
||||
explanation.push({
|
||||
module: included,
|
||||
reason: reason
|
||||
});
|
||||
});
|
||||
|
||||
detailed[entryPoint] = explanation;
|
||||
});
|
||||
|
||||
console.log(JSON.stringify(detailed, null, '\t'));
|
||||
});
|
||||
|
||||
function filterStream(testFunc) {
|
||||
return es.through(function (data) {
|
||||
if (!testFunc(data.relative)) {
|
||||
return;
|
||||
}
|
||||
this.emit('data', data);
|
||||
});
|
||||
}
|
||||
|
||||
gulp.task('editor-distro',
|
||||
task.series(
|
||||
task.parallel(
|
||||
util.rimraf('out-editor-src'),
|
||||
util.rimraf('out-editor-build'),
|
||||
util.rimraf('out-editor-esm'),
|
||||
util.rimraf('out-monaco-editor-core'),
|
||||
util.rimraf('out-editor'),
|
||||
util.rimraf('out-editor-min')
|
||||
),
|
||||
extractEditorSrcTask,
|
||||
task.parallel(
|
||||
task.series(
|
||||
compileEditorAMDTask,
|
||||
optimizeEditorAMDTask,
|
||||
minifyEditorAMDTask
|
||||
),
|
||||
task.series(
|
||||
createESMSourcesAndResourcesTask,
|
||||
compileEditorESMTask
|
||||
)
|
||||
),
|
||||
finalEditorResourcesTask
|
||||
)
|
||||
);
|
||||
|
||||
//#region monaco type checking
|
||||
|
||||
@@ -330,6 +335,7 @@ function createTscCompileTask(watch) {
|
||||
let errors = [];
|
||||
let reporter = createReporter();
|
||||
let report;
|
||||
// eslint-disable-next-line no-control-regex
|
||||
let magic = /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g; // https://stackoverflow.com/questions/25245716/remove-all-ansi-colors-styles-from-strings
|
||||
|
||||
child.stdout.on('data', data => {
|
||||
@@ -363,7 +369,10 @@ function createTscCompileTask(watch) {
|
||||
};
|
||||
}
|
||||
|
||||
gulp.task('monaco-typecheck-watch', createTscCompileTask(true));
|
||||
gulp.task('monaco-typecheck', createTscCompileTask(false));
|
||||
const monacoTypecheckWatchTask = task.define('monaco-typecheck-watch', createTscCompileTask(true));
|
||||
exports.monacoTypecheckWatchTask = monacoTypecheckWatchTask;
|
||||
|
||||
const monacoTypecheckTask = task.define('monaco-typecheck', createTscCompileTask(false));
|
||||
exports.monacoTypecheckTask = monacoTypecheckTask;
|
||||
|
||||
//#endregion
|
||||
|
||||
@@ -11,8 +11,8 @@ const path = require('path');
|
||||
const tsb = require('gulp-tsb');
|
||||
const es = require('event-stream');
|
||||
const filter = require('gulp-filter');
|
||||
const rimraf = require('rimraf');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const watcher = require('./lib/watch');
|
||||
const createReporter = require('./lib/reporter').createReporter;
|
||||
const glob = require('glob');
|
||||
@@ -21,6 +21,7 @@ const nlsDev = require('vscode-nls-dev');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
const plumber = require('gulp-plumber');
|
||||
const _ = require('underscore');
|
||||
|
||||
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
|
||||
|
||||
@@ -35,22 +36,13 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
const absolutePath = path.join(extensionsPath, tsconfigFile);
|
||||
const relativeDirname = path.dirname(tsconfigFile);
|
||||
|
||||
const tsOptions = require(absolutePath).compilerOptions;
|
||||
const tsconfig = require(absolutePath);
|
||||
const tsOptions = _.assign({}, tsconfig.extends ? require(path.join(extensionsPath, relativeDirname, tsconfig.extends)).compilerOptions : {}, tsconfig.compilerOptions);
|
||||
tsOptions.verbose = false;
|
||||
tsOptions.sourceMap = true;
|
||||
|
||||
const name = relativeDirname.replace(/\//g, '-');
|
||||
|
||||
// Tasks
|
||||
const clean = 'clean-extension:' + name;
|
||||
const compile = 'compile-extension:' + name;
|
||||
const watch = 'watch-extension:' + name;
|
||||
|
||||
// Build Tasks
|
||||
const cleanBuild = 'clean-extension-build:' + name;
|
||||
const compileBuild = 'compile-extension-build:' + name;
|
||||
const watchBuild = 'watch-extension-build:' + name;
|
||||
|
||||
const root = path.join('extensions', relativeDirname);
|
||||
const srcBase = path.join(root, 'src');
|
||||
const src = path.join(srcBase, '**');
|
||||
@@ -109,18 +101,18 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
|
||||
const srcOpts = { cwd: path.dirname(__dirname), base: srcBase };
|
||||
|
||||
gulp.task(clean, cb => rimraf(out, cb));
|
||||
const cleanTask = task.define(`clean-extension-${name}`, util.rimraf(out));
|
||||
|
||||
gulp.task(compile, [clean], () => {
|
||||
const compileTask = task.define(`compile-extension:${name}`, task.series(cleanTask, () => {
|
||||
const pipeline = createPipeline(false, true);
|
||||
const input = gulp.src(src, srcOpts);
|
||||
|
||||
return input
|
||||
.pipe(pipeline())
|
||||
.pipe(gulp.dest(out));
|
||||
});
|
||||
}));
|
||||
|
||||
gulp.task(watch, [clean], () => {
|
||||
const watchTask = task.define(`watch-extension:${name}`, task.series(cleanTask, () => {
|
||||
const pipeline = createPipeline(false);
|
||||
const input = gulp.src(src, srcOpts);
|
||||
const watchInput = watcher(src, srcOpts);
|
||||
@@ -128,43 +120,35 @@ const tasks = compilations.map(function (tsconfigFile) {
|
||||
return watchInput
|
||||
.pipe(util.incremental(pipeline, input))
|
||||
.pipe(gulp.dest(out));
|
||||
});
|
||||
}));
|
||||
|
||||
gulp.task(cleanBuild, cb => rimraf(out, cb));
|
||||
|
||||
gulp.task(compileBuild, [clean], () => {
|
||||
const compileBuildTask = task.define(`compile-build-extension-${name}`, task.series(cleanTask, () => {
|
||||
const pipeline = createPipeline(true, true);
|
||||
const input = gulp.src(src, srcOpts);
|
||||
|
||||
return input
|
||||
.pipe(pipeline())
|
||||
.pipe(gulp.dest(out));
|
||||
});
|
||||
}));
|
||||
|
||||
gulp.task(watchBuild, [clean], () => {
|
||||
const pipeline = createPipeline(true);
|
||||
const input = gulp.src(src, srcOpts);
|
||||
const watchInput = watcher(src, srcOpts);
|
||||
|
||||
return watchInput
|
||||
.pipe(util.incremental(() => pipeline(), input))
|
||||
.pipe(gulp.dest(out));
|
||||
});
|
||||
// Tasks
|
||||
gulp.task(compileTask);
|
||||
gulp.task(watchTask);
|
||||
|
||||
return {
|
||||
clean: clean,
|
||||
compile: compile,
|
||||
watch: watch,
|
||||
cleanBuild: cleanBuild,
|
||||
compileBuild: compileBuild,
|
||||
watchBuild: watchBuild
|
||||
compileTask: compileTask,
|
||||
watchTask: watchTask,
|
||||
compileBuildTask: compileBuildTask
|
||||
};
|
||||
});
|
||||
|
||||
gulp.task('clean-extensions', tasks.map(t => t.clean));
|
||||
gulp.task('compile-extensions', tasks.map(t => t.compile));
|
||||
gulp.task('watch-extensions', tasks.map(t => t.watch));
|
||||
const compileExtensionsTask = task.define('compile-extensions', task.parallel(...tasks.map(t => t.compileTask)));
|
||||
gulp.task(compileExtensionsTask);
|
||||
exports.compileExtensionsTask = compileExtensionsTask;
|
||||
|
||||
gulp.task('clean-extensions-build', tasks.map(t => t.cleanBuild));
|
||||
gulp.task('compile-extensions-build', tasks.map(t => t.compileBuild));
|
||||
gulp.task('watch-extensions-build', tasks.map(t => t.watchBuild));
|
||||
const watchExtensionsTask = task.define('watch-extensions', task.parallel(...tasks.map(t => t.watchTask)));
|
||||
gulp.task(watchExtensionsTask);
|
||||
exports.watchExtensionsTask = watchExtensionsTask;
|
||||
|
||||
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.parallel(...tasks.map(t => t.compileBuildTask)));
|
||||
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
|
||||
|
||||
@@ -42,12 +42,15 @@ const indentationFilter = [
|
||||
|
||||
// except specific files
|
||||
'!ThirdPartyNotices.txt',
|
||||
'!LICENSE.txt',
|
||||
'!LICENSE.{txt,rtf}',
|
||||
'!LICENSES.chromium.html',
|
||||
'!**/LICENSE',
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/vs/css.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/loader.js',
|
||||
'!src/vs/base/common/marked/marked.js',
|
||||
'!src/vs/base/common/winjs.base.js',
|
||||
'!src/vs/base/node/terminateProcess.sh',
|
||||
'!src/vs/base/node/cpuUsage.sh',
|
||||
'!test/assert.js',
|
||||
@@ -78,13 +81,23 @@ const indentationFilter = [
|
||||
'!src/vs/*/**/*.d.ts',
|
||||
'!src/typings/**/*.d.ts',
|
||||
'!extensions/**/*.d.ts',
|
||||
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe}',
|
||||
'!build/{lib,tslintRules}/**/*.js',
|
||||
'!**/*.{svg,exe,png,bmp,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns}',
|
||||
'!build/{lib,tslintRules,download}/**/*.js',
|
||||
'!build/**/*.sh',
|
||||
'!build/tfs/**/*.js',
|
||||
'!build/tfs/**/*.config',
|
||||
'!build/azure-pipelines/**/*.js',
|
||||
'!build/azure-pipelines/**/*.config',
|
||||
'!**/Dockerfile',
|
||||
'!extensions/markdown-language-features/media/*.js'
|
||||
'!**/Dockerfile.*',
|
||||
'!**/*.Dockerfile',
|
||||
'!**/*.dockerfile',
|
||||
'!extensions/markdown-language-features/media/*.js',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'!**/*.{xlf,docx,sql,vsix,bacpac}',
|
||||
'!extensions/mssql/sqltoolsservice/**',
|
||||
'!extensions/import/flatfileimportservice/**',
|
||||
'!extensions/admin-tool-ext-win/ssmsmin/**',
|
||||
'!extensions/resource-deployment/notebooks/**',
|
||||
'!extensions/mssql/notebooks/**'
|
||||
];
|
||||
|
||||
const copyrightFilter = [
|
||||
@@ -96,6 +109,8 @@ const copyrightFilter = [
|
||||
'!**/*.md',
|
||||
'!**/*.bat',
|
||||
'!**/*.cmd',
|
||||
'!**/*.ico',
|
||||
'!**/*.icns',
|
||||
'!**/*.xml',
|
||||
'!**/*.sh',
|
||||
'!**/*.txt',
|
||||
@@ -103,13 +118,47 @@ const copyrightFilter = [
|
||||
'!**/*.opts',
|
||||
'!**/*.disabled',
|
||||
'!**/*.code-workspace',
|
||||
'!**/promise-polyfill/polyfill.js',
|
||||
'!build/**/*.init',
|
||||
'!resources/linux/snap/snapcraft.yaml',
|
||||
'!resources/linux/snap/electron-launch',
|
||||
'!resources/win32/bin/code.js',
|
||||
'!resources/completions/**',
|
||||
'!extensions/markdown-language-features/media/highlight.css',
|
||||
'!extensions/html-language-features/server/src/modes/typescript/*',
|
||||
'!extensions/*/server/bin/*'
|
||||
'!extensions/*/server/bin/*',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'!extensions/notebook/src/intellisense/text.ts',
|
||||
'!extensions/mssql/src/objectExplorerNodeProvider/webhdfs.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/tableRenderers.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/common/url.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/common/renderMimeInterfaces.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/common/outputProcessor.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/common/mimemodel.ts',
|
||||
'!src/sql/workbench/parts/notebook/cellViews/media/*.css',
|
||||
'!src/sql/base/browser/ui/table/plugins/rowSelectionModel.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/rowDetailView.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/headerFilter.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/checkboxSelectColumn.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/cellSelectionModel.plugin.ts',
|
||||
'!src/sql/base/browser/ui/table/plugins/autoSizeColumns.plugin.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/sanitizer.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/renderers.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/registry.ts',
|
||||
'!src/sql/workbench/parts/notebook/outputs/factories.ts',
|
||||
'!src/sql/workbench/parts/notebook/models/nbformat.ts',
|
||||
'!extensions/markdown-language-features/media/tomorrow.css',
|
||||
'!src/sql/workbench/electron-browser/modelComponents/media/highlight.css',
|
||||
'!src/sql/parts/modelComponents/highlight.css',
|
||||
'!extensions/mssql/sqltoolsservice/**',
|
||||
'!extensions/import/flatfileimportservice/**',
|
||||
'!extensions/notebook/src/prompts/**',
|
||||
'!extensions/mssql/src/prompts/**',
|
||||
'!extensions/notebook/resources/jupyter_config/**',
|
||||
'!**/*.gif',
|
||||
'!**/*.xlf',
|
||||
'!**/*.dacpac',
|
||||
'!**/*.bacpac'
|
||||
];
|
||||
|
||||
const eslintFilter = [
|
||||
@@ -120,7 +169,6 @@ const eslintFilter = [
|
||||
'!src/vs/nls.js',
|
||||
'!src/vs/css.build.js',
|
||||
'!src/vs/nls.build.js',
|
||||
'!src/**/winjs.base.js',
|
||||
'!src/**/marked.js',
|
||||
'!**/test/**'
|
||||
];
|
||||
@@ -139,6 +187,11 @@ const tslintFilter = [
|
||||
'!extensions/html-language-features/server/lib/jquery.d.ts'
|
||||
];
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const useStrictFilter = [
|
||||
'src/**'
|
||||
];
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const copyrightHeaderLines = [
|
||||
'/*---------------------------------------------------------------------------------------------',
|
||||
@@ -156,8 +209,7 @@ gulp.task('eslint', () => {
|
||||
});
|
||||
|
||||
gulp.task('tslint', () => {
|
||||
// {{SQL CARBON EDIT}}
|
||||
const options = { emitError: false };
|
||||
const options = { emitError: true };
|
||||
|
||||
return vfs.src(all, { base: '.', follow: true, allowEmpty: true })
|
||||
.pipe(filter(tslintFilter))
|
||||
@@ -190,8 +242,8 @@ function hygiene(some) {
|
||||
});
|
||||
|
||||
const copyrights = es.through(function (file) {
|
||||
const lines = file.__lines;
|
||||
|
||||
const lines = file.__lines;
|
||||
for (let i = 0; i < copyrightHeaderLines.length; i++) {
|
||||
if (lines[i] !== copyrightHeaderLines[i]) {
|
||||
console.error(file.relative + ': Missing or bad copyright statement');
|
||||
@@ -203,6 +255,23 @@ function hygiene(some) {
|
||||
this.emit('data', file);
|
||||
});
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
// Check for unnecessary 'use strict' lines. These are automatically added by the alwaysStrict compiler option so don't need to be added manually
|
||||
const useStrict = es.through(function (file) {
|
||||
const lines = file.__lines;
|
||||
// Only take the first 10 lines to reduce false positives- the compiler will throw an error if it's not the first non-comment line in a file
|
||||
// (10 is used to account for copyright and extraneous newlines)
|
||||
lines.slice(0, 10).forEach((line, i) => {
|
||||
if (/\s*'use\s*strict\s*'/.test(line)) {
|
||||
console.error(file.relative + '(' + (i + 1) + ',1): Unnecessary \'use strict\' - this is already added by the compiler');
|
||||
errorCount++;
|
||||
}
|
||||
});
|
||||
|
||||
this.emit('data', file);
|
||||
});
|
||||
// {{SQL CARBON EDIT}} END
|
||||
|
||||
const formatting = es.map(function (file, cb) {
|
||||
tsfmt.processString(file.path, file.contents.toString('utf8'), {
|
||||
verify: false,
|
||||
@@ -223,7 +292,7 @@ function hygiene(some) {
|
||||
let formatted = result.dest.replace(/\r\n/gm, '\n');
|
||||
|
||||
if (original !== formatted) {
|
||||
console.error('File not formatted:', file.relative);
|
||||
console.error("File not formatted. Run the 'Format Document' command to fix it:", file.relative);
|
||||
errorCount++;
|
||||
}
|
||||
cb(null, file);
|
||||
@@ -255,27 +324,52 @@ function hygiene(some) {
|
||||
.pipe(filter(f => !f.stat.isDirectory()))
|
||||
.pipe(filter(indentationFilter))
|
||||
.pipe(indentation)
|
||||
.pipe(filter(copyrightFilter));
|
||||
// {{SQL CARBON EDIT}}
|
||||
// .pipe(copyrights);
|
||||
.pipe(filter(copyrightFilter))
|
||||
.pipe(copyrights);
|
||||
|
||||
const typescript = result
|
||||
.pipe(filter(tslintFilter))
|
||||
.pipe(formatting)
|
||||
.pipe(tsl);
|
||||
.pipe(tsl)
|
||||
// {{SQL CARBON EDIT}}
|
||||
.pipe(filter(useStrictFilter))
|
||||
.pipe(useStrict);
|
||||
|
||||
const javascript = result
|
||||
.pipe(filter(eslintFilter))
|
||||
.pipe(gulpeslint('src/.eslintrc'))
|
||||
.pipe(gulpeslint.formatEach('compact'));
|
||||
// {{SQL CARBON EDIT}}
|
||||
// .pipe(gulpeslint.failAfterError());
|
||||
.pipe(gulpeslint.formatEach('compact'))
|
||||
.pipe(gulpeslint.failAfterError());
|
||||
|
||||
let count = 0;
|
||||
return es.merge(typescript, javascript)
|
||||
.pipe(es.through(function (data) {
|
||||
// {{SQL CARBON EDIT}}
|
||||
count++;
|
||||
if (process.env['TRAVIS'] && count % 10 === 0) {
|
||||
process.stdout.write('.');
|
||||
}
|
||||
this.emit('data', data);
|
||||
}, function () {
|
||||
process.stdout.write('\n');
|
||||
|
||||
const tslintResult = tsLinter.getResult();
|
||||
if (tslintResult.failures.length > 0) {
|
||||
for (const failure of tslintResult.failures) {
|
||||
const name = failure.getFileName();
|
||||
const position = failure.getStartPosition();
|
||||
const line = position.getLineAndCharacter().line;
|
||||
const character = position.getLineAndCharacter().character;
|
||||
|
||||
console.error(`${name}:${line + 1}:${character + 1}:${failure.getFailure()}`);
|
||||
}
|
||||
errorCount += tslintResult.failures.length;
|
||||
}
|
||||
|
||||
if (errorCount > 0) {
|
||||
this.emit('error', 'Hygiene failed with ' + errorCount + ' errors. Check \'build/gulpfile.hygiene.js\'.');
|
||||
} else {
|
||||
this.emit('end');
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
@@ -293,7 +387,7 @@ function createGitIndexVinyls(paths) {
|
||||
return e(err);
|
||||
}
|
||||
|
||||
cp.exec(`git show :${relativePath}`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
|
||||
cp.exec(`git show ":${relativePath}"`, { maxBuffer: 2000 * 1024, encoding: 'buffer' }, (err, out) => {
|
||||
if (err) {
|
||||
return e(err);
|
||||
}
|
||||
|
||||
@@ -6,19 +6,10 @@
|
||||
'use strict';
|
||||
|
||||
const gulp = require('gulp');
|
||||
const json = require('gulp-json-editor');
|
||||
const buffer = require('gulp-buffer');
|
||||
const filter = require('gulp-filter');
|
||||
const es = require('event-stream');
|
||||
const util = require('./lib/util');
|
||||
const remote = require('gulp-remote-src');
|
||||
const zip = require('gulp-vinyl-zip');
|
||||
const assign = require('object-assign');
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const jeditor = require('gulp-json-editor');
|
||||
|
||||
const pkg = require('../package.json');
|
||||
const product = require('../product.json');
|
||||
|
||||
gulp.task('mixin', function () {
|
||||
// {{SQL CARBON EDIT}}
|
||||
@@ -35,19 +26,53 @@ gulp.task('mixin', function () {
|
||||
return;
|
||||
}
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
let serviceUrl = 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json';
|
||||
if (quality === 'insider') {
|
||||
serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
|
||||
}
|
||||
// {{SQL CARBON EDIT}} - apply ADS insiders values if needed
|
||||
let newValues = {
|
||||
"nameShort": product.nameShort,
|
||||
"nameLong": product.nameLong,
|
||||
"applicationName": product.applicationName,
|
||||
"dataFolderName": product.dataFolderName,
|
||||
"win32MutexName": product.win32MutexName,
|
||||
"win32DirName": product.win32DirName,
|
||||
"win32NameVersion": product.win32NameVersion,
|
||||
"win32RegValueName": product.win32RegValueName,
|
||||
"win32AppId": product.win32AppId,
|
||||
"win32x64AppId": product.win32x64AppId,
|
||||
"win32UserAppId": product.win32UserAppId,
|
||||
"win32x64UserAppId": product.win32x64UserAppId,
|
||||
"win32AppUserModelId": product.win32AppUserModelId,
|
||||
"win32ShellNameShort": product.win32ShellNameShort,
|
||||
"darwinBundleIdentifier": product.darwinBundleIdentifier,
|
||||
"updateUrl": updateUrl,
|
||||
"quality": quality,
|
||||
"extensionsGallery": {
|
||||
"serviceUrl": serviceUrl
|
||||
"serviceUrl": 'https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery.json'
|
||||
}
|
||||
};
|
||||
|
||||
if (quality === 'insider') {
|
||||
let dashSuffix = '-insiders';
|
||||
let dotSuffix = '.insiders';
|
||||
let displaySuffix = ' - Insiders';
|
||||
|
||||
newValues.extensionsGallery.serviceUrl = `https://sqlopsextensions.blob.core.windows.net/marketplace/v1/extensionsGallery-${quality}.json`;
|
||||
newValues.nameShort += dashSuffix;
|
||||
newValues.nameLong += displaySuffix;
|
||||
newValues.applicationName += dashSuffix;
|
||||
newValues.dataFolderName += dashSuffix;
|
||||
newValues.win32MutexName += dashSuffix;
|
||||
newValues.win32DirName += displaySuffix;
|
||||
newValues.win32NameVersion += displaySuffix;
|
||||
newValues.win32RegValueName += dashSuffix;
|
||||
newValues.win32AppId = "{{9F0801B2-DEE3-4272-A2C6-FBDF25BAAF0F}";
|
||||
newValues.win32x64AppId = "{{6748A5FD-29EB-4BA6-B3C6-E7B981B8D6B0}";
|
||||
newValues.win32UserAppId = "{{0F8CD1ED-483C-40EB-8AD2-8ED784651AA1}";
|
||||
newValues.win32x64UserAppId += dashSuffix;
|
||||
newValues.win32AppUserModelId += dotSuffix;
|
||||
newValues.win32ShellNameShort += displaySuffix;
|
||||
newValues.darwinBundleIdentifier += dotSuffix;
|
||||
}
|
||||
|
||||
return gulp.src('./product.json')
|
||||
.pipe(jeditor(newValues))
|
||||
.pipe(gulp.dest('.'));
|
||||
|
||||
16 build/gulpfile.reh.js Normal file
@@ -0,0 +1,16 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

const gulp = require('gulp');

const noop = () => { return Promise.resolve(); };

gulp.task('vscode-reh-win32-ia32-min', noop);
gulp.task('vscode-reh-win32-x64-min', noop);
gulp.task('vscode-reh-darwin-min', noop);
gulp.task('vscode-reh-linux-x64-min', noop);
gulp.task('vscode-reh-linux-arm-min', noop);
@@ -28,7 +28,6 @@ const formatFiles = (some) => {
            console.info('ran formatting on file ' + file.path + ' result: ' + result.message);
            if (result.error) {
                console.error(result.message);
                errorCount++;
            }
            cb(null, file);

@@ -40,7 +39,7 @@ const formatFiles = (some) => {
            .pipe(filter(f => !f.stat.isDirectory()))
            .pipe(formatting);

    }
};

const formatStagedFiles = () => {
    const cp = require('child_process');
@@ -81,4 +80,4 @@ const formatStagedFiles = () => {
                process.exit(1);
            });
    });
}
};
@@ -1,15 +0,0 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the Source EULA. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

'use strict';

const gulp = require('gulp');
const mocha = require('gulp-mocha');

gulp.task('test', function () {
    return gulp.src('test/all.js')
        .pipe(mocha({ ui: 'tdd', delay: true }))
        .once('end', function () { process.exit(); });
});
@@ -20,6 +20,7 @@ const filter = require('gulp-filter');
|
||||
const json = require('gulp-json-editor');
|
||||
const _ = require('underscore');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const ext = require('./lib/extensions');
|
||||
const buildfile = require('../src/buildfile');
|
||||
const common = require('./lib/optimize');
|
||||
@@ -32,17 +33,17 @@ const i18n = require('./lib/i18n');
|
||||
// {{SQL CARBON EDIT}}
|
||||
const serviceDownloader = require('service-downloader').ServiceDownloadProvider;
|
||||
const platformInfo = require('service-downloader/out/platform').PlatformInformation;
|
||||
const glob = require('glob');
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
const deps = require('./dependencies');
|
||||
const getElectronVersion = require('./lib/electron').getElectronVersion;
|
||||
const createAsar = require('./lib/asar').createAsar;
|
||||
const { compileBuildTask } = require('./gulpfile.compile');
|
||||
|
||||
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
|
||||
// @ts-ignore
|
||||
// {{SQL CARBON EDIT}}
|
||||
var del = require('del');
|
||||
const extensionsRoot = path.join(root, 'extensions');
|
||||
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
|
||||
|
||||
const baseModules = Object.keys(process.binding('natives')).filter(n => !/^_|\//.test(n));
|
||||
// {{SQL CARBON EDIT}}
|
||||
const nodeModules = [
|
||||
@@ -51,33 +52,12 @@ const nodeModules = [
|
||||
'rxjs/Observable',
|
||||
'rxjs/Subject',
|
||||
'rxjs/Observer',
|
||||
'ng2-charts/ng2-charts']
|
||||
'ng2-charts']
|
||||
.concat(Object.keys(product.dependencies || {}))
|
||||
.concat(_.uniq(productionDependencies.map(d => d.name)))
|
||||
.concat(baseModules);
|
||||
|
||||
|
||||
// Build
|
||||
const builtInExtensions = require('./builtInExtensions.json');
|
||||
|
||||
const excludedExtensions = [
|
||||
'vscode-api-tests',
|
||||
'vscode-colorize-tests',
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
];
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const vsce = require('vsce');
|
||||
const sqlBuiltInExtensions = [
|
||||
// Add SQL built-in extensions here.
|
||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
||||
'agent',
|
||||
'import',
|
||||
'profiler'
|
||||
];
|
||||
var azureExtensions = [ 'azurecore'];
|
||||
|
||||
const vscodeEntryPoints = _.flatten([
|
||||
buildfile.entrypoint('vs/workbench/workbench.main'),
|
||||
buildfile.base,
|
||||
@@ -90,22 +70,27 @@ const vscodeResources = [
|
||||
'out-build/cli.js',
|
||||
'out-build/driver.js',
|
||||
'out-build/bootstrap.js',
|
||||
'out-build/bootstrap-fork.js',
|
||||
'out-build/bootstrap-amd.js',
|
||||
'out-build/bootstrap-window.js',
|
||||
'out-build/paths.js',
|
||||
'out-build/vs/**/*.{svg,png,cur,html}',
|
||||
'!out-build/vs/code/browser/**/*.html',
|
||||
'out-build/vs/base/common/performance.js',
|
||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh}',
|
||||
'out-build/vs/base/node/languagePacks.js',
|
||||
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
|
||||
'out-build/vs/base/browser/ui/octiconLabel/octicons/**',
|
||||
'out-build/vs/workbench/browser/media/*-theme.css',
|
||||
'out-build/vs/workbench/electron-browser/bootstrap/**',
|
||||
'out-build/vs/workbench/parts/debug/**/*.json',
|
||||
'out-build/vs/workbench/parts/execution/**/*.scpt',
|
||||
'out-build/vs/workbench/parts/webview/electron-browser/webview-pre.js',
|
||||
'out-build/vs/workbench/contrib/debug/**/*.json',
|
||||
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
|
||||
'out-build/vs/workbench/contrib/webview/browser/pre/*.js',
|
||||
'out-build/vs/workbench/contrib/webview/electron-browser/pre/*.js',
|
||||
'out-build/vs/**/markdown.css',
|
||||
'out-build/vs/workbench/parts/tasks/**/*.json',
|
||||
'out-build/vs/workbench/parts/welcome/walkThrough/**/*.md',
|
||||
'out-build/vs/workbench/contrib/tasks/**/*.json',
|
||||
'out-build/vs/workbench/contrib/welcome/walkThrough/**/*.md',
|
||||
'out-build/vs/workbench/services/files/**/*.exe',
|
||||
'out-build/vs/workbench/services/files/**/*.md',
|
||||
'out-build/vs/code/electron-browser/workbench/**',
|
||||
'out-build/vs/code/electron-browser/sharedProcess/sharedProcess.js',
|
||||
'out-build/vs/code/electron-browser/issue/issueReporter.js',
|
||||
'out-build/vs/code/electron-browser/processExplorer/processExplorer.js',
|
||||
@@ -117,18 +102,17 @@ const vscodeResources = [
|
||||
'out-build/sql/parts/admin/**/*.html',
|
||||
'out-build/sql/parts/connection/connectionDialog/media/*.{gif,png,svg}',
|
||||
'out-build/sql/parts/common/dblist/**/*.html',
|
||||
'out-build/sql/parts/dashboard/**/*.html',
|
||||
'out-build/sql/workbench/parts/dashboard/**/*.html',
|
||||
'out-build/sql/parts/disasterRecovery/**/*.html',
|
||||
'out-build/sql/parts/common/modal/media/**',
|
||||
'out-build/sql/parts/grid/load/lib/**',
|
||||
'out-build/sql/parts/grid/load/loadJquery.js',
|
||||
'out-build/sql/parts/grid/media/**',
|
||||
'out-build/sql/parts/grid/views/**/*.html',
|
||||
'out-build/sql/workbench/parts/grid/media/**',
|
||||
'out-build/sql/workbench/parts/grid/views/**/*.html',
|
||||
'out-build/sql/parts/tasks/**/*.html',
|
||||
'out-build/sql/parts/taskHistory/viewlet/media/**',
|
||||
'out-build/sql/parts/jobManagement/common/media/*.svg',
|
||||
'out-build/sql/media/objectTypes/*.svg',
|
||||
'out-build/sql/media/icons/*.svg',
|
||||
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
|
||||
'!**/test/**'
|
||||
];
|
||||
|
||||
@@ -138,65 +122,84 @@ const BUNDLED_FILE_HEADER = [
|
||||
' *--------------------------------------------------------*/'
|
||||
].join('\n');
|
||||
|
||||
gulp.task('clean-optimized-vscode', util.rimraf('out-vscode'));
|
||||
gulp.task('optimize-vscode', ['clean-optimized-vscode', 'compile-build', 'compile-extensions-build'], common.optimizeTask({
|
||||
const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
|
||||
task.parallel(
|
||||
util.rimraf('out-vscode'),
|
||||
compileBuildTask
|
||||
),
|
||||
common.optimizeTask({
|
||||
src: 'out-build',
|
||||
entryPoints: vscodeEntryPoints,
|
||||
otherSources: [],
|
||||
resources: vscodeResources,
|
||||
loaderConfig: common.loaderConfig(nodeModules),
|
||||
header: BUNDLED_FILE_HEADER,
|
||||
out: 'out-vscode',
|
||||
bundleInfo: undefined
|
||||
}));
|
||||
})
|
||||
));
|
||||
|
||||
|
||||
gulp.task('optimize-index-js', ['optimize-vscode'], () => {
|
||||
const fullpath = path.join(process.cwd(), 'out-vscode/vs/workbench/electron-browser/bootstrap/index.js');
|
||||
const optimizeIndexJSTask = task.define('optimize-index-js', task.series(
|
||||
optimizeVSCodeTask,
|
||||
() => {
|
||||
const fullpath = path.join(process.cwd(), 'out-vscode/bootstrap-window.js');
|
||||
const contents = fs.readFileSync(fullpath).toString();
|
||||
const newContents = contents.replace('[/*BUILD->INSERT_NODE_MODULES*/]', JSON.stringify(nodeModules));
|
||||
fs.writeFileSync(fullpath, newContents);
|
||||
});
|
||||
}
|
||||
));
|
||||
|
||||
const baseUrl = `https://ticino.blob.core.windows.net/sourcemaps/${commit}/core`;
|
||||
gulp.task('clean-minified-vscode', util.rimraf('out-vscode-min'));
|
||||
gulp.task('minify-vscode', ['clean-minified-vscode', 'optimize-index-js'], common.minifyTask('out-vscode', baseUrl));
|
||||
const sourceMappingURLBase = `https://ticino.blob.core.windows.net/sourcemaps/${commit}`;
|
||||
const minifyVSCodeTask = task.define('minify-vscode', task.series(
|
||||
task.parallel(
|
||||
util.rimraf('out-vscode-min'),
|
||||
optimizeIndexJSTask
|
||||
),
|
||||
common.minifyTask('out-vscode', `${sourceMappingURLBase}/core`)
|
||||
));
|
||||
|
||||
// Package

// @ts-ignore JSON checking: darwinCredits is optional
const darwinCreditsTemplate = product.darwinCredits && _.template(fs.readFileSync(path.join(root, product.darwinCredits), 'utf8'));

function darwinBundleDocumentType(extensions, icon) {
return {
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
extensions: extensions,
iconFile: icon
};
}

const config = {
version: getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2018 Microsoft. All rights reserved',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',
darwinHelpBookFolder: 'VS Code HelpBook',
darwinHelpBookName: 'VS Code HelpBook',
darwinBundleDocumentTypes: [{
name: product.nameLong + ' document',
role: 'Editor',
ostypes: ["TEXT", "utxt", "TUTX", "****"],
// {{SQL CARBON EDIT}}
extensions: ["csv", "json", "sqlplan", "sql", "xml"],
iconFile: 'resources/darwin/code_file.icns'
}],
darwinBundleDocumentTypes: [
// {{SQL CARBON EDIT}} - Remove most document types and replace with ours
darwinBundleDocumentType(["csv", "json", "sqlplan", "sql", "xml"], 'resources/darwin/code_file.icns'),
],
darwinBundleURLTypes: [{
role: 'Viewer',
name: product.nameLong,
urlSchemes: [product.urlProtocol]
}],
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : void 0,
darwinForceDarkModeSupport: true,
darwinCredits: darwinCreditsTemplate ? Buffer.from(darwinCreditsTemplate({ commit: commit, date: new Date().toISOString() })) : undefined,
linuxExecutableName: product.applicationName,
winIcon: 'resources/win32/code.ico',
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || void 0,
token: process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined,

// @ts-ignore JSON checking: electronRepository is optional
repo: product.electronRepository || void 0
repo: product.electronRepository || undefined
};

function getElectron(arch) {
@@ -216,11 +219,11 @@ function getElectron(arch) {
};
}

gulp.task('clean-electron', util.rimraf('.build/electron'));
gulp.task('electron', ['clean-electron'], getElectron(process.arch));
gulp.task('electron-ia32', ['clean-electron'], getElectron('ia32'));
gulp.task('electron-x64', ['clean-electron'], getElectron('x64'));

gulp.task(task.define('electron', task.series(util.rimraf('.build/electron'), getElectron(process.arch))));
gulp.task(task.define('electron-ia32', task.series(util.rimraf('.build/electron'), getElectron('ia32'))));
gulp.task(task.define('electron-x64', task.series(util.rimraf('.build/electron'), getElectron('x64'))));
gulp.task(task.define('electron-arm', task.series(util.rimraf('.build/electron'), getElectron('armv7l'))));
gulp.task(task.define('electron-arm64', task.series(util.rimraf('.build/electron'), getElectron('arm64'))));

/**
* Compute checksums for some files.
@@ -256,116 +259,36 @@ function computeChecksum(filename) {
return hash;
}

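The hunk above elides the body of computeChecksum and keeps only its trailing return. As a rough sketch of what a helper with this shape usually does, assuming a synchronous read and a base64 MD5 digest (the algorithm and encoding actually used by the build are not visible in this diff):

const crypto = require('crypto');
const fs = require('fs');

// Hypothetical stand-in for computeChecksum(filename): hash the file contents
// and return the digest string that later lands in the product.json checksums map.
function computeChecksumSketch(filename) {
	const contents = fs.readFileSync(filename);
	return crypto.createHash('md5').update(contents).digest('base64');
}
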
function packageBuiltInExtensions() {
|
||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
||||
const packagePath = path.join(path.dirname(root), element.name + '.vsix');
|
||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||
vsce.createVSIX({
|
||||
cwd: element.path,
|
||||
packagePath: packagePath,
|
||||
useYarn: true
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
function packageAzureCoreTask(platform, arch) {
|
||||
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
||||
if (platform === 'darwin') {
|
||||
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', 'azurecore');
|
||||
} else {
|
||||
destination = path.join(destination, 'resources', 'app', 'extensions', 'azurecore');
|
||||
}
|
||||
|
||||
platform = platform || process.platform;
|
||||
|
||||
return () => {
|
||||
const root = path.resolve(path.join(__dirname, '..'));
|
||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => azureExtensions.indexOf(name) > -1);
|
||||
|
||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||
return ext.fromLocal(extension.path);
|
||||
}));
|
||||
|
||||
let result = localExtensions
|
||||
.pipe(util.skipDirectories())
|
||||
.pipe(util.fixWin32DirectoryPermissions())
|
||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
||||
|
||||
return result.pipe(vfs.dest(destination));
|
||||
};
|
||||
}
|
||||
|
||||
function packageTask(platform, arch, opts) {
|
||||
function packageTask(platform, arch, sourceFolderName, destinationFolderName, opts) {
|
||||
opts = opts || {};
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
||||
const destination = path.join(path.dirname(root), destinationFolderName);
|
||||
platform = platform || process.platform;
|
||||
|
||||
return () => {
|
||||
const out = opts.minified ? 'out-vscode-min' : 'out-vscode';
|
||||
const out = sourceFolderName;
|
||||
|
||||
const checksums = computeChecksums(out, [
|
||||
'vs/workbench/workbench.main.js',
|
||||
'vs/workbench/workbench.main.css',
|
||||
'vs/workbench/electron-browser/bootstrap/index.html',
|
||||
'vs/workbench/electron-browser/bootstrap/index.js',
|
||||
'vs/workbench/electron-browser/bootstrap/preload.js'
|
||||
'vs/code/electron-browser/workbench/workbench.html',
|
||||
'vs/code/electron-browser/workbench/workbench.js'
|
||||
]);
|
||||
|
||||
const src = gulp.src(out + '/**', { base: '.' })
|
||||
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }));
|
||||
|
||||
const root = path.resolve(path.join(__dirname, '..'));
|
||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
// {{SQL CARBON EDIT}}
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => azureExtensions.indexOf(name) === -1);
|
||||
|
||||
packageBuiltInExtensions();
|
||||
|
||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||
return ext.fromLocal(extension.path)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
}));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const extensionDepsSrc = [
|
||||
..._.flatten(extensionsProductionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
];
|
||||
|
||||
const localExtensionDependencies = gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']))
|
||||
.pipe(util.cleanNodeModule('account-provider-azure', ['node_modules/date-utils/doc/**', 'node_modules/adal_node/node_modules/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('typescript', ['**/**'], undefined));
|
||||
|
||||
const sources = es.merge(src, localExtensions, localExtensionDependencies)
|
||||
.pipe(rename(function (path) { path.dirname = path.dirname.replace(new RegExp('^' + out), 'out'); }))
|
||||
.pipe(util.setExecutableBit(['**/*.sh']))
|
||||
.pipe(filter(['**', '!**/*.js.map']));
|
||||
|
||||
const root = path.resolve(path.join(__dirname, '..'));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
ext.packageBuiltInExtensions();
|
||||
|
||||
const sources = es.merge(src, ext.packageExtensionsStream({
|
||||
sourceMappingURLBase: sourceMappingURLBase
|
||||
}));
|
||||
|
||||
let version = packageJson.version;
|
||||
// @ts-ignore JSON checking: quality is optional
|
||||
const quality = product.quality;
|
||||
@@ -376,8 +299,15 @@ function packageTask(platform, arch, opts) {
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const name = (platform === 'darwin') ? 'Azure Data Studio' : product.nameShort;
|
||||
const packageJsonUpdates = { name, version };
|
||||
|
||||
// for linux url handling
|
||||
if (platform === 'linux') {
|
||||
packageJsonUpdates.desktopName = `${product.applicationName}-url-handler.desktop`;
|
||||
}
|
||||
|
||||
const packageJsonStream = gulp.src(['package.json'], { base: '.' })
|
||||
.pipe(json({ name, version }));
|
||||
.pipe(json(packageJsonUpdates));
|
||||
|
||||
const date = new Date().toISOString();
|
||||
const productJsonUpdate = { commit, date, checksums };
|
||||
@@ -389,14 +319,13 @@ function packageTask(platform, arch, opts) {
|
||||
const productJsonStream = gulp.src(['product.json'], { base: '.' })
|
||||
.pipe(json(productJsonUpdate));
|
||||
|
||||
const license = gulp.src(['LICENSES.chromium.html', 'LICENSE.txt', 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.' });
|
||||
|
||||
const watermark = gulp.src(['resources/letterpress.svg', 'resources/letterpress-dark.svg', 'resources/letterpress-hc.svg'], { base: '.' });
|
||||
const license = gulp.src(['LICENSES.chromium.html', product.licenseFileName, 'ThirdPartyNotices.txt', 'licenses/**'], { base: '.', allowEmpty: true });
|
||||
|
||||
// TODO the API should be copied to `out` during compile, not here
|
||||
const api = gulp.src('src/vs/vscode.d.ts').pipe(rename('out/vs/vscode.d.ts'));
|
||||
// {{SQL CARBON EDIT}}
|
||||
const dataApi = gulp.src('src/vs/data.d.ts').pipe(rename('out/sql/data.d.ts'));
|
||||
const dataApi = gulp.src('src/sql/azdata.d.ts').pipe(rename('out/sql/azdata.d.ts'));
|
||||
const sqlopsAPI = gulp.src('src/sql/sqlops.d.ts').pipe(rename('out/sql/sqlops.d.ts'));
|
||||
|
||||
const depsSrc = [
|
||||
..._.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
@@ -406,29 +335,7 @@ function packageTask(platform, arch, opts) {
|
||||
|
||||
const deps = gulp.src(depsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']))
|
||||
.pipe(util.cleanNodeModule('fsevents', ['binding.gyp', 'fsevents.cc', 'build/**', 'src/**', 'test/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('oniguruma', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/*.js']))
|
||||
.pipe(util.cleanNodeModule('windows-mutex', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-keymap', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-is-elevated', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('native-watchdog', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('spdlog', ['binding.gyp', 'build/**', 'deps/**', 'src/**', 'test/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('jschardet', ['dist/**']))
|
||||
.pipe(util.cleanNodeModule('windows-foreground-love', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('windows-process-tree', ['binding.gyp', 'build/**', 'src/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('gc-signals', ['binding.gyp', 'build/**', 'src/**', 'deps/**'], ['**/*.node', 'src/index.js']))
|
||||
.pipe(util.cleanNodeModule('keytar', ['binding.gyp', 'build/**', 'src/**', 'script/**', 'node_modules/**'], ['**/*.node']))
|
||||
.pipe(util.cleanNodeModule('node-pty', ['binding.gyp', 'build/**', 'src/**', 'tools/**'], ['build/Release/*.exe', 'build/Release/*.dll', 'build/Release/*.node']))
|
||||
// {{SQL CARBON EDIT}}
|
||||
.pipe(util.cleanNodeModule('chart.js', ['node_modules/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('emmet', ['node_modules/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('pty.js', ['build/**'], ['build/Release/**']))
|
||||
.pipe(util.cleanNodeModule('jquery-ui', ['external/**', 'demos/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('core-js', ['**/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('slickgrid', ['node_modules/**', 'examples/**'], undefined))
|
||||
.pipe(util.cleanNodeModule('nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
|
||||
.pipe(util.cleanNodeModule('vscode-nsfw', ['binding.gyp', 'build/**', 'src/**', 'openpa/**', 'includes/**'], ['**/*.node', '**/*.a']))
|
||||
.pipe(util.cleanNodeModule('vsda', ['binding.gyp', 'README.md', 'build/**', '*.bat', '*.sh', '*.cpp', '*.h'], ['build/Release/vsda.node']))
|
||||
.pipe(util.cleanNodeModules(path.join(__dirname, '.nativeignore')))
|
||||
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*'], 'app/node_modules.asar'));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
@@ -438,24 +345,33 @@ function packageTask(platform, arch, opts) {
|
||||
'node_modules/slickgrid/**/*.*',
|
||||
'node_modules/underscore/**/*.*',
|
||||
'node_modules/zone.js/**/*.*',
|
||||
'node_modules/chart.js/**/*.*'
|
||||
'node_modules/chart.js/**/*.*',
|
||||
'node_modules/chartjs-color/**/*.*',
|
||||
'node_modules/chartjs-color-string/**/*.*',
|
||||
'node_modules/color-convert/**/*.*',
|
||||
'node_modules/color-name/**/*.*',
|
||||
'node_modules/moment/**/*.*'
|
||||
], { base: '.', dot: true });
|
||||
|
||||
let all = es.merge(
|
||||
packageJsonStream,
|
||||
productJsonStream,
|
||||
license,
|
||||
watermark,
|
||||
api,
|
||||
// {{SQL CARBON EDIT}}
|
||||
copiedModules,
|
||||
dataApi,
|
||||
sqlopsAPI,
|
||||
sources,
|
||||
deps
|
||||
);
|
||||
|
||||
if (platform === 'win32') {
|
||||
all = es.merge(all, gulp.src(['resources/win32/code_file.ico', 'resources/win32/code_70x70.png', 'resources/win32/code_150x150.png'], { base: '.' }));
|
||||
all = es.merge(all, gulp.src([
|
||||
// {{SQL CARBON EDIT}} remove unused icons
|
||||
'resources/win32/code_70x70.png',
|
||||
'resources/win32/code_150x150.png'
|
||||
], { base: '.' }));
|
||||
} else if (platform === 'linux') {
|
||||
all = es.merge(all, gulp.src('resources/linux/code.png', { base: '.' }));
|
||||
} else if (platform === 'darwin') {
|
||||
@@ -471,8 +387,10 @@ function packageTask(platform, arch, opts) {
|
||||
.pipe(electron(_.extend({}, config, { platform, arch, ffmpegChromium: true })))
|
||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
||||
|
||||
// result = es.merge(result, gulp.src('resources/completions/**', { base: '.' }));
|
||||
|
||||
if (platform === 'win32') {
|
||||
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32' }));
|
||||
result = es.merge(result, gulp.src('resources/win32/bin/code.js', { base: 'resources/win32', allowEmpty: true }));
|
||||
|
||||
result = es.merge(result, gulp.src('resources/win32/bin/code.cmd', { base: 'resources/win32' })
|
||||
.pipe(replace('@@NAME@@', product.nameShort))
|
||||
@@ -480,47 +398,66 @@ function packageTask(platform, arch, opts) {
|
||||
|
||||
result = es.merge(result, gulp.src('resources/win32/bin/code.sh', { base: 'resources/win32' })
|
||||
.pipe(replace('@@NAME@@', product.nameShort))
|
||||
.pipe(replace('@@PRODNAME@@', product.nameLong))
|
||||
.pipe(replace('@@VERSION@@', version))
|
||||
.pipe(replace('@@COMMIT@@', commit))
|
||||
.pipe(replace('@@APPNAME@@', product.applicationName))
|
||||
.pipe(replace('@@QUALITY@@', quality))
|
||||
.pipe(rename(function (f) { f.basename = product.applicationName; f.extname = ''; })));
|
||||
|
||||
result = es.merge(result, gulp.src('resources/win32/VisualElementsManifest.xml', { base: 'resources/win32' })
|
||||
.pipe(rename(product.nameShort + '.VisualElementsManifest.xml')));
|
||||
} else if (platform === 'linux') {
|
||||
result = es.merge(result, gulp.src('resources/linux/bin/code.sh', { base: '.' })
|
||||
.pipe(replace('@@PRODNAME@@', product.nameLong))
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(rename('bin/' + product.applicationName)));
|
||||
}
|
||||
|
||||
// submit all stats that have been collected
|
||||
// during the build phase
|
||||
if (opts.stats) {
|
||||
result.on('end', () => {
|
||||
const { submitAllStats } = require('./lib/stats');
|
||||
submitAllStats(product, commit).then(() => console.log('Submitted bundle stats!'));
|
||||
});
|
||||
}
|
||||
|
||||
return result.pipe(vfs.dest(destination));
|
||||
};
|
||||
}
|
||||
|
||||
const buildRoot = path.dirname(root);
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
gulp.task('vscode-win32-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('win32', 'x64'));
|
||||
gulp.task('vscode-darwin-azurecore', ['optimize-vscode'], packageAzureCoreTask('darwin'));
|
||||
gulp.task('vscode-linux-x64-azurecore', ['optimize-vscode'], packageAzureCoreTask('linux', 'x64'));
|
||||
const BUILD_TARGETS = [
{ platform: 'win32', arch: 'ia32' },
{ platform: 'win32', arch: 'x64' },
{ platform: 'darwin', arch: null, opts: { stats: true } },
{ platform: 'linux', arch: 'ia32' },
{ platform: 'linux', arch: 'x64' },
{ platform: 'linux', arch: 'arm' },
{ platform: 'linux', arch: 'arm64' },
];
BUILD_TARGETS.forEach(buildTarget => {
const dashed = (str) => (str ? `-${str}` : ``);
const platform = buildTarget.platform;
const arch = buildTarget.arch;
const opts = buildTarget.opts;

gulp.task('clean-vscode-win32-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-ia32')));
gulp.task('clean-vscode-win32-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-win32-x64')));
gulp.task('clean-vscode-darwin', util.rimraf(path.join(buildRoot, 'azuredatastudio-darwin')));
gulp.task('clean-vscode-linux-ia32', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-ia32')));
gulp.task('clean-vscode-linux-x64', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-x64')));
gulp.task('clean-vscode-linux-arm', util.rimraf(path.join(buildRoot, 'azuredatastudio-linux-arm')));
['', 'min'].forEach(minified => {
const sourceFolderName = `out-vscode${dashed(minified)}`;
const destinationFolderName = `azuredatastudio${dashed(platform)}${dashed(arch)}`;

gulp.task('vscode-win32-ia32', ['optimize-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32'));
gulp.task('vscode-win32-x64', ['vscode-win32-x64-azurecore', 'optimize-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64'));
gulp.task('vscode-darwin', ['vscode-darwin-azurecore', 'optimize-vscode', 'clean-vscode-darwin'], packageTask('darwin'));
gulp.task('vscode-linux-ia32', ['optimize-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32'));
gulp.task('vscode-linux-x64', ['vscode-linux-x64-azurecore', 'optimize-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64'));
gulp.task('vscode-linux-arm', ['optimize-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm'));

gulp.task('vscode-win32-ia32-min', ['minify-vscode', 'clean-vscode-win32-ia32'], packageTask('win32', 'ia32', { minified: true }));
gulp.task('vscode-win32-x64-min', ['minify-vscode', 'clean-vscode-win32-x64'], packageTask('win32', 'x64', { minified: true }));
gulp.task('vscode-darwin-min', ['minify-vscode', 'clean-vscode-darwin'], packageTask('darwin', null, { minified: true }));
gulp.task('vscode-linux-ia32-min', ['minify-vscode', 'clean-vscode-linux-ia32'], packageTask('linux', 'ia32', { minified: true }));
gulp.task('vscode-linux-x64-min', ['minify-vscode', 'clean-vscode-linux-x64'], packageTask('linux', 'x64', { minified: true }));
gulp.task('vscode-linux-arm-min', ['minify-vscode', 'clean-vscode-linux-arm'], packageTask('linux', 'arm', { minified: true }));
const vscodeTask = task.define(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}`, task.series(
task.parallel(
minified ? minifyVSCodeTask : optimizeVSCodeTask,
util.rimraf(path.join(buildRoot, destinationFolderName))
),
packageTask(platform, arch, sourceFolderName, destinationFolderName, opts)
));
gulp.task(vscodeTask);
});
});

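The loop above replaces the hand-written clean and package registrations with a platform, arch and minified matrix. The dashed helper only prefixes non-empty segments with '-', so the generated names match the old ones. A short sketch of the names the template literal produces:

const dashed = (str) => (str ? `-${str}` : ``);
const sampleTargets = [
	{ platform: 'win32', arch: 'x64' },
	{ platform: 'darwin', arch: null },
	{ platform: 'linux', arch: 'arm64' },
];
for (const { platform, arch } of sampleTargets) {
	for (const minified of ['', 'min']) {
		// vscode-win32-x64, vscode-win32-x64-min, vscode-darwin, vscode-darwin-min,
		// vscode-linux-arm64, vscode-linux-arm64-min
		console.log(`vscode${dashed(platform)}${dashed(arch)}${dashed(minified)}`);
	}
}
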
// Transifex Localizations
|
||||
|
||||
@@ -543,7 +480,11 @@ const apiHostname = process.env.TRANSIFEX_API_URL;
|
||||
const apiName = process.env.TRANSIFEX_API_NAME;
|
||||
const apiToken = process.env.TRANSIFEX_API_TOKEN;
|
||||
|
||||
gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
|
||||
gulp.task(task.define(
|
||||
'vscode-translations-push',
|
||||
task.series(
|
||||
optimizeVSCodeTask,
|
||||
function () {
|
||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||
const pathToExtensions = './extensions/*';
|
||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||
@@ -554,9 +495,15 @@ gulp.task('vscode-translations-push', ['optimize-vscode'], function () {
|
||||
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
||||
).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
|
||||
).pipe(i18n.pushXlfFiles(apiHostname, apiName, apiToken));
|
||||
});
|
||||
}
|
||||
)
|
||||
));
|
||||
|
||||
gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
|
||||
gulp.task(task.define(
|
||||
'vscode-translations-export',
|
||||
task.series(
|
||||
optimizeVSCodeTask,
|
||||
function () {
|
||||
const pathToMetadata = './out-vscode/nls.metadata.json';
|
||||
const pathToExtensions = './extensions/*';
|
||||
const pathToSetup = 'build/win32/**/{Default.isl,messages.en.isl}';
|
||||
@@ -565,46 +512,50 @@ gulp.task('vscode-translations-push-test', ['optimize-vscode'], function () {
|
||||
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
|
||||
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
|
||||
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
|
||||
// {{SQL CARBON EDIT}}
|
||||
// disable since function makes calls to VS Code Transifex API
|
||||
// ).pipe(i18n.findObsoleteResources(apiHostname, apiName, apiToken)
|
||||
).pipe(vfs.dest('../vscode-transifex-input'));
|
||||
});
|
||||
).pipe(vfs.dest('../vscode-translations-export'));
|
||||
}
|
||||
)
|
||||
));
|
||||
|
||||
gulp.task('vscode-translations-pull', function () {
|
||||
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
|
||||
i18n.pullCoreAndExtensionsXlfFiles(apiHostname, apiName, apiToken, language).pipe(vfs.dest(`../vscode-localization/${language.id}/build`));
|
||||
|
||||
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
|
||||
let includeDefault = !!innoSetupConfig[language.id].defaultInfo;
|
||||
i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-localization/${language.id}/setup`));
|
||||
});
|
||||
return i18n.pullSetupXlfFiles(apiHostname, apiName, apiToken, language, includeDefault).pipe(vfs.dest(`../vscode-translations-import/${language.id}/setup`));
|
||||
}));
|
||||
});
|
||||
|
||||
gulp.task('vscode-translations-import', function () {
|
||||
// {{SQL CARBON EDIT}} - Replace function body with our own
|
||||
return new Promise(function(resolve) {
|
||||
[...i18n.defaultLanguages, ...i18n.extraLanguages].forEach(language => {
|
||||
gulp.src(`../vscode-localization/${language.id}/build/*/*.xlf`)
|
||||
let languageId = language.translationId ? language.translationId : language.id;
|
||||
gulp.src(`resources/xlf/${languageId}/**/*.xlf`)
|
||||
.pipe(i18n.prepareI18nFiles())
|
||||
.pipe(vfs.dest(`./i18n/${language.folderName}`));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
// gulp.src(`../vscode-localization/${language.id}/setup/*/*.xlf`)
|
||||
// .pipe(i18n.prepareIslFiles(language, innoSetupConfig[language.id]))
|
||||
// .pipe(vfs.dest(`./build/win32/i18n`));
|
||||
resolve();
|
||||
});
|
||||
});
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
});
|
||||
|
||||
// Sourcemaps
|
||||
|
||||
gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
|
||||
gulp.task('upload-vscode-sourcemaps', () => {
|
||||
const vs = gulp.src('out-vscode-min/**/*.map', { base: 'out-vscode-min' })
|
||||
.pipe(es.mapSync(f => {
|
||||
f.path = `${f.base}/core/${f.relative}`;
|
||||
return f;
|
||||
}));
|
||||
|
||||
const extensions = gulp.src('extensions/**/out/**/*.map', { base: '.' });
|
||||
const extensionsOut = gulp.src('extensions/**/out/**/*.map', { base: '.' });
|
||||
const extensionsDist = gulp.src('extensions/**/dist/**/*.map', { base: '.' });
|
||||
|
||||
return es.merge(vs, extensions)
|
||||
return es.merge(vs, extensionsOut, extensionsDist)
|
||||
.pipe(es.through(function (data) {
|
||||
// debug
|
||||
console.log('Uploading Sourcemap', data.relative);
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(azure.upload({
|
||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||
@@ -613,57 +564,8 @@ gulp.task('upload-vscode-sourcemaps', ['minify-vscode'], () => {
|
||||
}));
|
||||
});
|
||||
|
||||
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
|
||||
gulp.task('upload-vscode-configuration', ['generate-vscode-configuration'], () => {
|
||||
if (!shouldSetupSettingsSearch()) {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
console.log(`Only runs on master and release branches, not ${branch}`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(allConfigDetailsPath)) {
|
||||
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
|
||||
}
|
||||
|
||||
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
|
||||
if (!settingsSearchBuildId) {
|
||||
throw new Error('Failed to compute build number');
|
||||
}
|
||||
|
||||
return gulp.src(allConfigDetailsPath)
|
||||
.pipe(azure.upload({
|
||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||
container: 'configuration',
|
||||
prefix: `${settingsSearchBuildId}/${commit}/`
|
||||
}));
|
||||
});
|
||||
|
||||
function shouldSetupSettingsSearch() {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
|
||||
}
|
||||
|
||||
function getSettingsSearchBuildId(packageJson) {
|
||||
try {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
|
||||
/\/master$/.test(branch) ? 1 :
|
||||
2; // Some unexpected branch
|
||||
|
||||
const out = cp.execSync(`git rev-list HEAD --count`);
|
||||
const count = parseInt(out.toString());
|
||||
|
||||
// <version number><commit count><branchId (avoid unlikely conflicts)>
|
||||
// 1.25.1, 1,234,567 commits, master = 1250112345671
|
||||
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
|
||||
} catch (e) {
|
||||
throw new Error('Could not determine build number: ' + e.toString());
|
||||
}
|
||||
}
|
||||
|
||||
// This task is only run for the MacOS build
|
||||
gulp.task('generate-vscode-configuration', () => {
|
||||
const generateVSCodeConfigurationTask = task.define('generate-vscode-configuration', () => {
|
||||
return new Promise((resolve, reject) => {
|
||||
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
|
||||
if (!buildDir) {
|
||||
@@ -700,6 +602,61 @@ gulp.task('generate-vscode-configuration', () => {
|
||||
});
|
||||
});
|
||||
|
||||
const allConfigDetailsPath = path.join(os.tmpdir(), 'configuration.json');
|
||||
gulp.task(task.define(
|
||||
'upload-vscode-configuration',
|
||||
task.series(
|
||||
generateVSCodeConfigurationTask,
|
||||
() => {
|
||||
if (!shouldSetupSettingsSearch()) {
|
||||
const branch = process.env.BUILD_SOURCEBRANCH;
|
||||
console.log(`Only runs on master and release branches, not ${branch}`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!fs.existsSync(allConfigDetailsPath)) {
|
||||
throw new Error(`configuration file at ${allConfigDetailsPath} does not exist`);
|
||||
}
|
||||
|
||||
const settingsSearchBuildId = getSettingsSearchBuildId(packageJson);
|
||||
if (!settingsSearchBuildId) {
|
||||
throw new Error('Failed to compute build number');
|
||||
}
|
||||
|
||||
return gulp.src(allConfigDetailsPath)
|
||||
.pipe(azure.upload({
|
||||
account: process.env.AZURE_STORAGE_ACCOUNT,
|
||||
key: process.env.AZURE_STORAGE_ACCESS_KEY,
|
||||
container: 'configuration',
|
||||
prefix: `${settingsSearchBuildId}/${commit}/`
|
||||
}));
|
||||
}
|
||||
)
|
||||
));
|
||||
|
||||
function shouldSetupSettingsSearch() {
const branch = process.env.BUILD_SOURCEBRANCH;
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
}

function getSettingsSearchBuildId(packageJson) {
try {
const branch = process.env.BUILD_SOURCEBRANCH;
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
/\/master$/.test(branch) ? 1 :
2; // Some unexpected branch

const out = cp.execSync(`git rev-list HEAD --count`);
const count = parseInt(out.toString());

// <version number><commit count><branchId (avoid unlikely conflicts)>
// 1.25.1, 1,234,567 commits, master = 1250112345671
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
} catch (e) {
throw new Error('Could not determine build number: ' + e.toString());
}
}

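The comment inside getSettingsSearchBuildId gives one worked example; spelling out the arithmetic, assuming util.versionStringToNumber('1.25.1') evaluates to 12501 (that helper is not shown in this diff):

// 1.25.1 on master with 1,234,567 commits:
const versionNumber = 12501;   // assumed result of util.versionStringToNumber('1.25.1')
const commitCount = 1234567;   // output of `git rev-list HEAD --count`
const branchId = 1;            // 0 = release branch, 1 = master, 2 = anything else
const buildId = versionNumber * 1e8 + commitCount * 10 + branchId;
console.log(buildId);          // 1250112345671
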
// {{SQL CARBON EDIT}}
// Install service locally before building carbon

@@ -725,3 +682,25 @@ gulp.task('install-sqltoolsservice', () => {
return installService();
});

function installSsmsMin() {
const config = require('../extensions/admin-tool-ext-win/src/config.json');
return platformInfo.getCurrent().then(p => {
const runtime = p.runtimeId;
// fix path since it won't be correct
config.installDirectory = path.join(__dirname, '..', 'extensions', 'admin-tool-ext-win', config.installDirectory);
var installer = new serviceDownloader(config);
const serviceInstallFolder = installer.getInstallDirectory(runtime);
const serviceCleanupFolder = path.join(serviceInstallFolder, '..');
console.log('Cleaning up the install folder: ' + serviceCleanupFolder);
return del(serviceCleanupFolder + '/*').then(() => {
console.log('Installing the service. Install folder: ' + serviceInstallFolder);
return installer.installService(runtime);
}, delError => {
console.log('failed to delete the install folder error: ' + delError);
});
});
}

gulp.task('install-ssmsmin', () => {
return installSsmsMin();
});

@@ -12,14 +12,18 @@ const shell = require('gulp-shell');
|
||||
const es = require('event-stream');
|
||||
const vfs = require('vinyl-fs');
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const packageJson = require('../package.json');
|
||||
const product = require('../product.json');
|
||||
const rpmDependencies = require('../resources/linux/rpm/dependencies.json');
|
||||
const path = require('path');
|
||||
const root = path.dirname(__dirname);
|
||||
const commit = util.getVersion(root);
|
||||
|
||||
const linuxPackageRevision = Math.floor(new Date().getTime() / 1000);
|
||||
|
||||
function getDebPackageArch(arch) {
return { x64: 'amd64', ia32: 'i386', arm: 'armhf' }[arch];
return { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
}

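The table above (and the matching change to getRpmPackageArch further down) only gains an arm64 entry. One property of this lookup style worth keeping in mind when adding architectures: an unknown key silently yields undefined instead of failing. A small guarded variant, purely illustrative and not part of the diff:

function getDebPackageArchChecked(arch) {
	const map = { x64: 'amd64', ia32: 'i386', arm: 'armhf', arm64: 'arm64' };
	if (!(arch in map)) {
		throw new Error(`Unsupported Debian package architecture: ${arch}`);
	}
	return map[arch];
}
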
function prepareDebPackage(arch) {
|
||||
@@ -30,11 +34,17 @@ function prepareDebPackage(arch) {
|
||||
|
||||
return function () {
|
||||
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
|
||||
|
||||
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
||||
.pipe(rename('usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
|
||||
|
||||
const desktops = es.merge(desktop, desktopUrlHandler)
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@ICON@@', product.applicationName))
|
||||
.pipe(rename('usr/share/applications/' + product.applicationName + '.desktop'));
|
||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
||||
|
||||
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
@@ -43,7 +53,13 @@ function prepareDebPackage(arch) {
|
||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||
|
||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||
.pipe(rename('usr/share/pixmaps/' + product.applicationName + '.png'));
|
||||
.pipe(rename('usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
||||
|
||||
// const bash_completion = gulp.src('resources/completions/bash/code')
|
||||
// .pipe(rename('usr/share/bash-completion/completions/code'));
|
||||
|
||||
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
|
||||
// .pipe(rename('usr/share/zsh/vendor-completions/_code'));
|
||||
|
||||
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
|
||||
@@ -79,7 +95,7 @@ function prepareDebPackage(arch) {
|
||||
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
|
||||
.pipe(rename('DEBIAN/postinst'));
|
||||
|
||||
const all = es.merge(control, postinst, postrm, prerm, desktop, appdata, icon, code);
|
||||
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, icon, /* bash_completion, zsh_completion, */ code);
|
||||
|
||||
return all.pipe(vfs.dest(destination));
|
||||
};
|
||||
@@ -99,7 +115,7 @@ function getRpmBuildPath(rpmArch) {
|
||||
}
|
||||
|
||||
function getRpmPackageArch(arch) {
|
||||
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf' }[arch];
|
||||
return { x64: 'x86_64', ia32: 'i386', arm: 'armhf', arm64: "arm64" }[arch];
|
||||
}
|
||||
|
||||
function prepareRpmPackage(arch) {
|
||||
@@ -109,11 +125,17 @@ function prepareRpmPackage(arch) {
|
||||
|
||||
return function () {
|
||||
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
|
||||
|
||||
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
||||
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '-url-handler.desktop'));
|
||||
|
||||
const desktops = es.merge(desktop, desktopUrlHandler)
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@ICON@@', product.applicationName))
|
||||
.pipe(rename('BUILD/usr/share/applications/' + product.applicationName + '.desktop'));
|
||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
||||
|
||||
const appdata = gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
@@ -122,7 +144,13 @@ function prepareRpmPackage(arch) {
|
||||
.pipe(rename('usr/share/appdata/' + product.applicationName + '.appdata.xml'));
|
||||
|
||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||
.pipe(rename('BUILD/usr/share/pixmaps/' + product.applicationName + '.png'));
|
||||
.pipe(rename('BUILD/usr/share/pixmaps/' + product.linuxIconName + '.png'));
|
||||
|
||||
// const bash_completion = gulp.src('resources/completions/bash/code')
|
||||
// .pipe(rename('BUILD/usr/share/bash-completion/completions/code'));
|
||||
|
||||
// const zsh_completion = gulp.src('resources/completions/zsh/_code')
|
||||
// .pipe(rename('BUILD/usr/share/zsh/site-functions/_code'));
|
||||
|
||||
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||
.pipe(rename(function (p) { p.dirname = 'BUILD/usr/share/' + product.applicationName + '/' + p.dirname; }));
|
||||
@@ -130,6 +158,7 @@ function prepareRpmPackage(arch) {
|
||||
const spec = gulp.src('resources/linux/rpm/code.spec.template', { base: '.' })
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@ICON@@', product.linuxIconName))
|
||||
.pipe(replace('@@VERSION@@', packageJson.version))
|
||||
.pipe(replace('@@RELEASE@@', linuxPackageRevision))
|
||||
.pipe(replace('@@ARCHITECTURE@@', rpmArch))
|
||||
@@ -144,7 +173,7 @@ function prepareRpmPackage(arch) {
|
||||
const specIcon = gulp.src('resources/linux/rpm/code.xpm', { base: '.' })
|
||||
.pipe(rename('SOURCES/' + product.applicationName + '.xpm'));
|
||||
|
||||
const all = es.merge(code, desktop, appdata, icon, spec, specIcon);
|
||||
const all = es.merge(code, desktops, appdata, icon, /* bash_completion, zsh_completion, */ spec, specIcon);
|
||||
|
||||
return all.pipe(vfs.dest(getRpmBuildPath(rpmArch)));
|
||||
};
|
||||
@@ -162,37 +191,45 @@ function buildRpmPackage(arch) {
|
||||
'cp "' + rpmOut + '/$(ls ' + rpmOut + ')" ' + destination + '/'
|
||||
]);
|
||||
}
|
||||
|
||||
function getSnapBuildPath(arch) {
|
||||
return `.build/linux/snap/${arch}/${product.applicationName}-${arch}`;
|
||||
}
|
||||
|
||||
function prepareSnapPackage(arch) {
|
||||
const binaryDir = '../VSCode-linux-' + arch;
|
||||
// {{SQL CARBON EDIT}}
|
||||
const binaryDir = '../azuredatastudio-linux-' + arch;
|
||||
const destination = getSnapBuildPath(arch);
|
||||
|
||||
return function () {
|
||||
const desktop = gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
|
||||
|
||||
const desktopUrlHandler = gulp.src('resources/linux/code-url-handler.desktop', { base: '.' })
|
||||
.pipe(rename(`usr/share/applications/${product.applicationName}-url-handler.desktop`));
|
||||
|
||||
const desktops = es.merge(desktop, desktopUrlHandler)
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.applicationName}.png`))
|
||||
.pipe(rename(`usr/share/applications/${product.applicationName}.desktop`));
|
||||
.pipe(replace('@@ICON@@', `/usr/share/pixmaps/${product.linuxIconName}.png`))
|
||||
.pipe(replace('@@URLPROTOCOL@@', product.urlProtocol));
|
||||
|
||||
const icon = gulp.src('resources/linux/code.png', { base: '.' })
|
||||
.pipe(rename(`usr/share/pixmaps/${product.applicationName}.png`));
|
||||
.pipe(rename(`usr/share/pixmaps/${product.linuxIconName}.png`));
|
||||
|
||||
const code = gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||
.pipe(rename(function (p) { p.dirname = 'usr/share/' + product.applicationName + '/' + p.dirname; }));
|
||||
.pipe(rename(function (p) { p.dirname = `usr/share/${product.applicationName}/${p.dirname}`; }));
|
||||
|
||||
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(replace('@@VERSION@@', packageJson.version))
|
||||
.pipe(replace('@@VERSION@@', commit.substr(0, 8)))
|
||||
.pipe(rename('snap/snapcraft.yaml'));
|
||||
|
||||
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })
|
||||
.pipe(rename('electron-launch'));
|
||||
|
||||
const all = es.merge(desktop, icon, code, snapcraft, electronLaunch);
|
||||
const all = es.merge(desktops, icon, code, snapcraft, electronLaunch);
|
||||
|
||||
return all.pipe(vfs.dest(destination));
|
||||
};
|
||||
@@ -200,117 +237,39 @@ function prepareSnapPackage(arch) {
|
||||
|
||||
function buildSnapPackage(arch) {
|
||||
const snapBuildPath = getSnapBuildPath(arch);
|
||||
const snapFilename = `${product.applicationName}-${packageJson.version}-${linuxPackageRevision}-${arch}.snap`;
|
||||
return shell.task([
|
||||
`chmod +x ${snapBuildPath}/electron-launch`,
|
||||
`cd ${snapBuildPath} && snapcraft snap --output ../${snapFilename}`
|
||||
]);
|
||||
return shell.task(`cd ${snapBuildPath} && snapcraft build`);
|
||||
}
|
||||
|
||||
function getFlatpakArch(arch) {
|
||||
return { x64: 'x86_64', ia32: 'i386', arm: 'arm' }[arch];
|
||||
}
|
||||
|
||||
function prepareFlatpak(arch) {
|
||||
// {{SQL CARBON EDIT}}
|
||||
const binaryDir = '../azuredatastudio-linux-' + arch;
|
||||
const flatpakArch = getFlatpakArch(arch);
|
||||
const destination = '.build/linux/flatpak/' + flatpakArch;
|
||||
|
||||
return function () {
|
||||
// This is not imported in the global scope to avoid requiring ImageMagick
|
||||
// (or GraphicsMagick) when not building building Flatpak bundles.
|
||||
const imgResize = require('gulp-image-resize');
|
||||
|
||||
const all = [16, 24, 32, 48, 64, 128, 192, 256, 512].map(function (size) {
|
||||
return gulp.src('resources/linux/code.png', { base: '.' })
|
||||
.pipe(imgResize({ width: size, height: size, format: "png", noProfile: true }))
|
||||
.pipe(rename('share/icons/hicolor/' + size + 'x' + size + '/apps/' + flatpakManifest.appId + '.png'));
|
||||
});
|
||||
|
||||
all.push(gulp.src('resources/linux/code.desktop', { base: '.' })
|
||||
.pipe(replace('Exec=/usr/share/@@NAME@@/@@NAME@@', 'Exec=' + product.applicationName))
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@NAME_SHORT@@', product.nameShort))
|
||||
.pipe(replace('@@NAME@@', product.applicationName))
|
||||
.pipe(rename('share/applications/' + flatpakManifest.appId + '.desktop')));
|
||||
|
||||
all.push(gulp.src('resources/linux/code.appdata.xml', { base: '.' })
|
||||
.pipe(replace('@@NAME_LONG@@', product.nameLong))
|
||||
.pipe(replace('@@NAME@@', flatpakManifest.appId))
|
||||
.pipe(replace('@@LICENSE@@', product.licenseName))
|
||||
.pipe(rename('share/appdata/' + flatpakManifest.appId + '.appdata.xml')));
|
||||
|
||||
all.push(gulp.src(binaryDir + '/**/*', { base: binaryDir })
|
||||
.pipe(rename(function (p) {
|
||||
p.dirname = 'share/' + product.applicationName + '/' + p.dirname;
|
||||
})));
|
||||
|
||||
return es.merge(all).pipe(vfs.dest(destination));
|
||||
};
|
||||
}
|
||||
|
||||
function buildFlatpak(arch) {
|
||||
const flatpakArch = getFlatpakArch(arch);
|
||||
const manifest = {};
|
||||
for (var k in flatpakManifest) {
|
||||
manifest[k] = flatpakManifest[k];
|
||||
}
|
||||
manifest.files = [
|
||||
['.build/linux/flatpak/' + flatpakArch, '/'],
|
||||
const BUILD_TARGETS = [
|
||||
{ arch: 'ia32' },
|
||||
{ arch: 'x64' },
|
||||
{ arch: 'arm' },
|
||||
{ arch: 'arm64' },
|
||||
];
|
||||
const buildOptions = {
|
||||
arch: flatpakArch,
|
||||
subject: product.nameLong + ' ' + packageJson.version + '.' + linuxPackageRevision,
|
||||
};
|
||||
// If requested, use the configured path for the OSTree repository.
|
||||
if (process.env.FLATPAK_REPO) {
|
||||
buildOptions.repoDir = process.env.FLATPAK_REPO;
|
||||
} else {
|
||||
buildOptions.bundlePath = manifest.appId + '-' + flatpakArch + '.flatpak';
|
||||
}
|
||||
// Setup PGP signing if requested.
|
||||
if (process.env.GPG_KEY_ID !== undefined) {
|
||||
buildOptions.gpgSign = process.env.GPG_KEY_ID;
|
||||
if (process.env.GPG_HOMEDIR) {
|
||||
buildOptions.gpgHomedir = process.env.GPG_HOME_DIR;
|
||||
}
|
||||
}
|
||||
return function (cb) {
|
||||
require('flatpak-bundler').bundle(manifest, buildOptions, cb);
|
||||
};
|
||||
|
||||
BUILD_TARGETS.forEach((buildTarget) => {
|
||||
const arch = buildTarget.arch;
|
||||
|
||||
{
|
||||
const debArch = getDebPackageArch(arch);
|
||||
const prepareDebTask = task.define(`vscode-linux-${arch}-prepare-deb`, task.series(util.rimraf(`.build/linux/deb/${debArch}`), prepareDebPackage(arch)));
|
||||
// gulp.task(prepareDebTask);
|
||||
const buildDebTask = task.define(`vscode-linux-${arch}-build-deb`, task.series(prepareDebTask, buildDebPackage(arch)));
|
||||
gulp.task(buildDebTask);
|
||||
}
|
||||
|
||||
gulp.task('clean-vscode-linux-ia32-deb', util.rimraf('.build/linux/deb/i386'));
|
||||
gulp.task('clean-vscode-linux-x64-deb', util.rimraf('.build/linux/deb/amd64'));
|
||||
gulp.task('clean-vscode-linux-arm-deb', util.rimraf('.build/linux/deb/armhf'));
|
||||
gulp.task('clean-vscode-linux-ia32-rpm', util.rimraf('.build/linux/rpm/i386'));
|
||||
gulp.task('clean-vscode-linux-x64-rpm', util.rimraf('.build/linux/rpm/x86_64'));
|
||||
gulp.task('clean-vscode-linux-arm-rpm', util.rimraf('.build/linux/rpm/armhf'));
|
||||
gulp.task('clean-vscode-linux-ia32-snap', util.rimraf('.build/linux/snap/x64'));
|
||||
gulp.task('clean-vscode-linux-x64-snap', util.rimraf('.build/linux/snap/x64'));
|
||||
gulp.task('clean-vscode-linux-arm-snap', util.rimraf('.build/linux/snap/x64'));
|
||||
gulp.task('clean-vscode-linux-ia32-flatpak', util.rimraf('.build/linux/flatpak/i386'));
|
||||
gulp.task('clean-vscode-linux-x64-flatpak', util.rimraf('.build/linux/flatpak/x86_64'));
|
||||
gulp.task('clean-vscode-linux-arm-flatpak', util.rimraf('.build/linux/flatpak/arm'));
|
||||
{
|
||||
const rpmArch = getRpmPackageArch(arch);
|
||||
const prepareRpmTask = task.define(`vscode-linux-${arch}-prepare-rpm`, task.series(util.rimraf(`.build/linux/rpm/${rpmArch}`), prepareRpmPackage(arch)));
|
||||
// gulp.task(prepareRpmTask);
|
||||
const buildRpmTask = task.define(`vscode-linux-${arch}-build-rpm`, task.series(prepareRpmTask, buildRpmPackage(arch)));
|
||||
gulp.task(buildRpmTask);
|
||||
}
|
||||
|
||||
gulp.task('vscode-linux-ia32-prepare-deb', ['clean-vscode-linux-ia32-deb'], prepareDebPackage('ia32'));
|
||||
gulp.task('vscode-linux-x64-prepare-deb', ['clean-vscode-linux-x64-deb'], prepareDebPackage('x64'));
|
||||
gulp.task('vscode-linux-arm-prepare-deb', ['clean-vscode-linux-arm-deb'], prepareDebPackage('arm'));
|
||||
gulp.task('vscode-linux-ia32-build-deb', ['vscode-linux-ia32-prepare-deb'], buildDebPackage('ia32'));
|
||||
gulp.task('vscode-linux-x64-build-deb', ['vscode-linux-x64-prepare-deb'], buildDebPackage('x64'));
|
||||
gulp.task('vscode-linux-arm-build-deb', ['vscode-linux-arm-prepare-deb'], buildDebPackage('arm'));
|
||||
|
||||
gulp.task('vscode-linux-ia32-prepare-rpm', ['clean-vscode-linux-ia32-rpm'], prepareRpmPackage('ia32'));
|
||||
gulp.task('vscode-linux-x64-prepare-rpm', ['clean-vscode-linux-x64-rpm'], prepareRpmPackage('x64'));
|
||||
gulp.task('vscode-linux-arm-prepare-rpm', ['clean-vscode-linux-arm-rpm'], prepareRpmPackage('arm'));
|
||||
gulp.task('vscode-linux-ia32-build-rpm', ['vscode-linux-ia32-prepare-rpm'], buildRpmPackage('ia32'));
|
||||
gulp.task('vscode-linux-x64-build-rpm', ['vscode-linux-x64-prepare-rpm'], buildRpmPackage('x64'));
|
||||
gulp.task('vscode-linux-arm-build-rpm', ['vscode-linux-arm-prepare-rpm'], buildRpmPackage('arm'));
|
||||
|
||||
gulp.task('vscode-linux-ia32-prepare-snap', ['clean-vscode-linux-ia32-snap'], prepareSnapPackage('ia32'));
|
||||
gulp.task('vscode-linux-x64-prepare-snap', ['clean-vscode-linux-x64-snap'], prepareSnapPackage('x64'));
|
||||
gulp.task('vscode-linux-arm-prepare-snap', ['clean-vscode-linux-arm-snap'], prepareSnapPackage('arm'));
|
||||
gulp.task('vscode-linux-ia32-build-snap', ['vscode-linux-ia32-prepare-snap'], buildSnapPackage('ia32'));
|
||||
gulp.task('vscode-linux-x64-build-snap', ['vscode-linux-x64-prepare-snap'], buildSnapPackage('x64'));
|
||||
gulp.task('vscode-linux-arm-build-snap', ['vscode-linux-arm-prepare-snap'], buildSnapPackage('arm'));
|
||||
{
|
||||
const prepareSnapTask = task.define(`vscode-linux-${arch}-prepare-snap`, task.series(util.rimraf(`.build/linux/snap/${arch}`), prepareSnapPackage(arch)));
|
||||
gulp.task(prepareSnapTask);
|
||||
const buildSnapTask = task.define(`vscode-linux-${arch}-build-snap`, task.series(prepareSnapTask, buildSnapPackage(arch)));
|
||||
gulp.task(buildSnapTask);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -12,9 +12,11 @@ const assert = require('assert');
|
||||
const cp = require('child_process');
|
||||
const _7z = require('7zip')['7z'];
|
||||
const util = require('./lib/util');
|
||||
const task = require('./lib/task');
|
||||
const pkg = require('../package.json');
|
||||
const product = require('../product.json');
|
||||
const vfs = require('vinyl-fs');
|
||||
const rcedit = require('rcedit');
|
||||
const mkdirp = require('mkdirp');
|
||||
|
||||
const repoPath = path.dirname(__dirname);
|
||||
@@ -25,18 +27,21 @@ const zipPath = arch => path.join(zipDir(arch), `VSCode-win32-${arch}.zip`);
|
||||
const setupDir = (arch, target) => path.join(repoPath, '.build', `win32-${arch}`, `${target}-setup`);
|
||||
const issPath = path.join(__dirname, 'win32', 'code.iss');
|
||||
const innoSetupPath = path.join(path.dirname(path.dirname(require.resolve('innosetup-compiler'))), 'bin', 'ISCC.exe');
|
||||
const signPS1 = path.join(repoPath, 'build', 'tfs', 'win32', 'sign.ps1');
|
||||
// const signPS1 = path.join(repoPath, 'build', 'azure-pipelines', 'win32', 'sign.ps1');
|
||||
|
||||
function packageInnoSetup(iss, options, cb) {
options = options || {};

const definitions = options.definitions || {};
const debug = process.argv.some(arg => arg === '--debug-inno');

if (debug) {
if (process.argv.some(arg => arg === '--debug-inno')) {
definitions['Debug'] = 'true';
}

if (process.argv.some(arg => arg === '--sign')) {
definitions['Sign'] = 'true';
}

const keys = Object.keys(definitions);

keys.forEach(key => assert(typeof definitions[key] === 'string', `Missing value for '${key}' in Inno Setup package step`));
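After this rewrite, --debug-inno and --sign on the gulp command line both end up as string entries in definitions before the assertion runs. How packageInnoSetup hands those definitions to ISCC is outside this hunk; a hedged sketch of the usual approach, with the exact compiler flags assumed rather than taken from this diff:

// Assumed: each definition becomes an Inno Setup preprocessor define such as /dSign=true.
const definitions = { Debug: 'true', Sign: 'true' };
const defines = Object.keys(definitions).map(key => `/d${key}=${definitions[key]}`);
console.log(defines); // [ '/dDebug=true', '/dSign=true' ]
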
@@ -103,8 +108,8 @@ function buildWin32Setup(arch, target) {
|
||||
}
|
||||
|
||||
function defineWin32SetupTasks(arch, target) {
|
||||
gulp.task(`clean-vscode-win32-${arch}-${target}-setup`, util.rimraf(setupDir(arch, target)));
|
||||
gulp.task(`vscode-win32-${arch}-${target}-setup`, [`clean-vscode-win32-${arch}-${target}-setup`], buildWin32Setup(arch, target));
|
||||
const cleanTask = util.rimraf(setupDir(arch, target));
|
||||
gulp.task(task.define(`vscode-win32-${arch}-${target}-setup`, task.series(cleanTask, buildWin32Setup(arch, target))));
|
||||
}
|
||||
|
||||
defineWin32SetupTasks('ia32', 'system');
|
||||
@@ -122,11 +127,8 @@ function archiveWin32Setup(arch) {
|
||||
};
|
||||
}
|
||||
|
||||
gulp.task('clean-vscode-win32-ia32-archive', util.rimraf(zipDir('ia32')));
|
||||
gulp.task('vscode-win32-ia32-archive', ['clean-vscode-win32-ia32-archive'], archiveWin32Setup('ia32'));
|
||||
|
||||
gulp.task('clean-vscode-win32-x64-archive', util.rimraf(zipDir('x64')));
|
||||
gulp.task('vscode-win32-x64-archive', ['clean-vscode-win32-x64-archive'], archiveWin32Setup('x64'));
|
||||
gulp.task(task.define('vscode-win32-ia32-archive', task.series(util.rimraf(zipDir('ia32')), archiveWin32Setup('ia32'))));
|
||||
gulp.task(task.define('vscode-win32-x64-archive', task.series(util.rimraf(zipDir('x64')), archiveWin32Setup('x64'))));
|
||||
|
||||
function copyInnoUpdater(arch) {
|
||||
return () => {
|
||||
@@ -135,5 +137,12 @@ function copyInnoUpdater(arch) {
|
||||
};
|
||||
}
|
||||
|
||||
gulp.task('vscode-win32-ia32-copy-inno-updater', copyInnoUpdater('ia32'));
gulp.task('vscode-win32-x64-copy-inno-updater', copyInnoUpdater('x64'));
function patchInnoUpdater(arch) {
return cb => {
const icon = path.join(repoPath, 'resources', 'win32', 'code.ico');
rcedit(path.join(buildPath(arch), 'tools', 'inno_updater.exe'), { icon }, cb);
};
}

gulp.task(task.define('vscode-win32-ia32-inno-updater', task.series(copyInnoUpdater('ia32'), patchInnoUpdater('ia32'))));
gulp.task(task.define('vscode-win32-x64-inno-updater', task.series(copyInnoUpdater('x64'), patchInnoUpdater('x64'))));

15 build/jsconfig.json Normal file
@@ -0,0 +1,15 @@
{
"compilerOptions": {
"module": "commonjs",
"target": "es2017",
"jsx": "preserve",
"checkJs": true
},
"include": [
"**/*.js"
],
"exclude": [
"node_modules",
"**/node_modules/*"
]
}
@@ -4,33 +4,33 @@
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
var path = require("path");
var es = require("event-stream");
var pickle = require("chromium-pickle-js");
var Filesystem = require("asar/lib/filesystem");
var VinylFile = require("vinyl");
var minimatch = require("minimatch");
const path = require("path");
const es = require("event-stream");
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
const VinylFile = require("vinyl");
const minimatch = require("minimatch");
function createAsar(folderPath, unpackGlobs, destFilename) {
var shouldUnpackFile = function (file) {
for (var i = 0; i < unpackGlobs.length; i++) {
const shouldUnpackFile = (file) => {
for (let i = 0; i < unpackGlobs.length; i++) {
if (minimatch(file.relative, unpackGlobs[i])) {
return true;
}
}
return false;
};
var filesystem = new Filesystem(folderPath);
var out = [];
const filesystem = new Filesystem(folderPath);
const out = [];
// Keep track of pending inserts
var pendingInserts = 0;
var onFileInserted = function () { pendingInserts--; };
let pendingInserts = 0;
let onFileInserted = () => { pendingInserts--; };
// Do not insert twice the same directory
var seenDir = {};
var insertDirectoryRecursive = function (dir) {
const seenDir = {};
const insertDirectoryRecursive = (dir) => {
if (seenDir[dir]) {
return;
}
var lastSlash = dir.lastIndexOf('/');
let lastSlash = dir.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = dir.lastIndexOf('\\');
}
@@ -40,8 +40,8 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
seenDir[dir] = true;
filesystem.insertDirectory(dir);
};
var insertDirectoryForFile = function (file) {
var lastSlash = file.lastIndexOf('/');
const insertDirectoryForFile = (file) => {
let lastSlash = file.lastIndexOf('/');
if (lastSlash === -1) {
lastSlash = file.lastIndexOf('\\');
}
@@ -49,7 +49,7 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
insertDirectoryRecursive(file.substring(0, lastSlash));
}
};
var insertFile = function (relativePath, stat, shouldUnpack) {
const insertFile = (relativePath, stat, shouldUnpack) => {
insertDirectoryForFile(relativePath);
pendingInserts++;
filesystem.insertFile(relativePath, shouldUnpack, { stat: stat }, {}, onFileInserted);
@@ -59,13 +59,13 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
return;
}
if (!file.stat.isFile()) {
throw new Error("unknown item in stream!");
throw new Error(`unknown item in stream!`);
}
var shouldUnpack = shouldUnpackFile(file);
const shouldUnpack = shouldUnpackFile(file);
insertFile(file.relative, { size: file.contents.length, mode: file.stat.mode }, shouldUnpack);
if (shouldUnpack) {
// The file goes outside of xx.asar, in a folder xx.asar.unpacked
var relative = path.relative(folderPath, file.path);
const relative = path.relative(folderPath, file.path);
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
@@ -79,34 +79,33 @@ function createAsar(folderPath, unpackGlobs, destFilename) {
out.push(file.contents);
}
}, function () {
var _this = this;
var finish = function () {
let finish = () => {
{
var headerPickle = pickle.createEmpty();
const headerPickle = pickle.createEmpty();
headerPickle.writeString(JSON.stringify(filesystem.header));
var headerBuf = headerPickle.toBuffer();
var sizePickle = pickle.createEmpty();
const headerBuf = headerPickle.toBuffer();
const sizePickle = pickle.createEmpty();
sizePickle.writeUInt32(headerBuf.length);
var sizeBuf = sizePickle.toBuffer();
const sizeBuf = sizePickle.toBuffer();
out.unshift(headerBuf);
out.unshift(sizeBuf);
}
var contents = Buffer.concat(out);
const contents = Buffer.concat(out);
out.length = 0;
_this.queue(new VinylFile({
this.queue(new VinylFile({
cwd: folderPath,
base: folderPath,
path: destFilename,
contents: contents
}));
_this.queue(null);
this.queue(null);
};
// Call finish() only when all file inserts have finished...
if (pendingInserts === 0) {
finish();
}
else {
onFileInserted = function () {
onFileInserted = () => {
pendingInserts--;
if (pendingInserts === 0) {
finish();
@@ -7,8 +7,8 @@

import * as path from 'path';
import * as es from 'event-stream';
import * as pickle from 'chromium-pickle-js';
import * as Filesystem from 'asar/lib/filesystem';
const pickle = require('chromium-pickle-js');
const Filesystem = require('asar/lib/filesystem');
import * as VinylFile from 'vinyl';
import * as minimatch from 'minimatch';

@@ -14,7 +14,8 @@ const es = require('event-stream');
const rename = require('gulp-rename');
const vfs = require('vinyl-fs');
const ext = require('./extensions');
const util = require('gulp-util');
const fancyLog = require('fancy-log');
const ansiColors = require('ansi-colors');

const root = path.dirname(path.dirname(__dirname));
const builtInExtensions = require('../builtInExtensions.json');
@@ -43,22 +44,22 @@ function isUpToDate(extension) {

function syncMarketplaceExtension(extension) {
if (isUpToDate(extension)) {
util.log(util.colors.blue('[marketplace]'), `${extension.name}@${extension.version}`, util.colors.green('✔︎'));
fancyLog(ansiColors.blue('[marketplace]'), `${extension.name}@${extension.version}`, ansiColors.green('✔︎'));
return es.readArray([]);
}

rimraf.sync(getExtensionPath(extension));

return ext.fromMarketplace(extension.name, extension.version)
return ext.fromMarketplace(extension.name, extension.version, extension.metadata)
.pipe(rename(p => p.dirname = `${extension.name}/${p.dirname}`))
.pipe(vfs.dest('.build/builtInExtensions'))
.on('end', () => util.log(util.colors.blue('[marketplace]'), extension.name, util.colors.green('✔︎')));
.on('end', () => fancyLog(ansiColors.blue('[marketplace]'), extension.name, ansiColors.green('✔︎')));
}

function syncExtension(extension, controlState) {
switch (controlState) {
case 'disabled':
util.log(util.colors.blue('[disabled]'), util.colors.gray(extension.name));
fancyLog(ansiColors.blue('[disabled]'), ansiColors.gray(extension.name));
return es.readArray([]);

case 'marketplace':
@@ -66,15 +67,15 @@ function syncExtension(extension, controlState) {

default:
if (!fs.existsSync(controlState)) {
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but that path does not exist.`));
return es.readArray([]);

} else if (!fs.existsSync(path.join(controlState, 'package.json'))) {
util.log(util.colors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
fancyLog(ansiColors.red(`Error: Built-in extension '${extension.name}' is configured to run from '${controlState}' but there is no 'package.json' file in that directory.`));
return es.readArray([]);
}

util.log(util.colors.blue('[local]'), `${extension.name}: ${util.colors.cyan(controlState)}`, util.colors.green('✔︎'));
fancyLog(ansiColors.blue('[local]'), `${extension.name}: ${ansiColors.cyan(controlState)}`, ansiColors.green('✔︎'));
return es.readArray([]);
}
}
@@ -93,8 +94,8 @@ function writeControlFile(control) {
}

function main() {
util.log('Syncronizing built-in extensions...');
util.log(`You can manage built-in extensions with the ${util.colors.cyan('--builtin')} flag`);
fancyLog('Syncronizing built-in extensions...');
fancyLog(`You can manage built-in extensions with the ${ansiColors.cyan('--builtin')} flag`);

const control = readControlFile();
const streams = [];
@@ -4,19 +4,19 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var vm = require("vm");
|
||||
const fs = require("fs");
|
||||
const path = require("path");
|
||||
const vm = require("vm");
|
||||
/**
|
||||
* Bundle `entryPoints` given config `config`.
|
||||
*/
|
||||
function bundle(entryPoints, config, callback) {
|
||||
var entryPointsMap = {};
|
||||
entryPoints.forEach(function (module) {
|
||||
const entryPointsMap = {};
|
||||
entryPoints.forEach((module) => {
|
||||
entryPointsMap[module.name] = module;
|
||||
});
|
||||
var allMentionedModulesMap = {};
|
||||
entryPoints.forEach(function (module) {
|
||||
const allMentionedModulesMap = {};
|
||||
entryPoints.forEach((module) => {
|
||||
allMentionedModulesMap[module.name] = true;
|
||||
(module.include || []).forEach(function (includedModule) {
|
||||
allMentionedModulesMap[includedModule] = true;
|
||||
@@ -25,26 +25,30 @@ function bundle(entryPoints, config, callback) {
|
||||
allMentionedModulesMap[excludedModule] = true;
|
||||
});
|
||||
});
|
||||
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
||||
var r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
||||
var loaderModule = { exports: {} };
|
||||
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
||||
const r = vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
||||
const loaderModule = { exports: {} };
|
||||
r.call({}, require, loaderModule, loaderModule.exports);
|
||||
var loader = loaderModule.exports;
|
||||
const loader = loaderModule.exports;
|
||||
config.isBuild = true;
|
||||
config.paths = config.paths || {};
|
||||
if (!config.paths['vs/nls']) {
|
||||
config.paths['vs/nls'] = 'out-build/vs/nls.build';
|
||||
}
|
||||
if (!config.paths['vs/css']) {
|
||||
config.paths['vs/css'] = 'out-build/vs/css.build';
|
||||
}
|
||||
loader.config(config);
|
||||
loader(['require'], function (localRequire) {
|
||||
var resolvePath = function (path) {
|
||||
var r = localRequire.toUrl(path);
|
||||
loader(['require'], (localRequire) => {
|
||||
const resolvePath = (path) => {
|
||||
const r = localRequire.toUrl(path);
|
||||
if (!/\.js/.test(r)) {
|
||||
return r + '.js';
|
||||
}
|
||||
return r;
|
||||
};
|
||||
for (var moduleId in entryPointsMap) {
|
||||
var entryPoint = entryPointsMap[moduleId];
|
||||
for (const moduleId in entryPointsMap) {
|
||||
const entryPoint = entryPointsMap[moduleId];
|
||||
if (entryPoint.append) {
|
||||
entryPoint.append = entryPoint.append.map(resolvePath);
|
||||
}
|
||||
@@ -53,59 +57,59 @@ function bundle(entryPoints, config, callback) {
|
||||
}
|
||||
}
|
||||
});
|
||||
loader(Object.keys(allMentionedModulesMap), function () {
|
||||
var modules = loader.getBuildInfo();
|
||||
var partialResult = emitEntryPoints(modules, entryPointsMap);
|
||||
var cssInlinedResources = loader('vs/css').getInlinedResources();
|
||||
loader(Object.keys(allMentionedModulesMap), () => {
|
||||
const modules = loader.getBuildInfo();
|
||||
const partialResult = emitEntryPoints(modules, entryPointsMap);
|
||||
const cssInlinedResources = loader('vs/css').getInlinedResources();
|
||||
callback(null, {
|
||||
files: partialResult.files,
|
||||
cssInlinedResources: cssInlinedResources,
|
||||
bundleData: partialResult.bundleData
|
||||
});
|
||||
}, function (err) { return callback(err, null); });
|
||||
}, (err) => callback(err, null));
|
||||
}
|
||||
exports.bundle = bundle;
|
||||
function emitEntryPoints(modules, entryPoints) {
|
||||
var modulesMap = {};
|
||||
modules.forEach(function (m) {
|
||||
const modulesMap = {};
|
||||
modules.forEach((m) => {
|
||||
modulesMap[m.id] = m;
|
||||
});
|
||||
var modulesGraph = {};
|
||||
modules.forEach(function (m) {
|
||||
const modulesGraph = {};
|
||||
modules.forEach((m) => {
|
||||
modulesGraph[m.id] = m.dependencies;
|
||||
});
|
||||
var sortedModules = topologicalSort(modulesGraph);
|
||||
var result = [];
|
||||
var usedPlugins = {};
|
||||
var bundleData = {
|
||||
const sortedModules = topologicalSort(modulesGraph);
|
||||
let result = [];
|
||||
const usedPlugins = {};
|
||||
const bundleData = {
|
||||
graph: modulesGraph,
|
||||
bundles: {}
|
||||
};
|
||||
Object.keys(entryPoints).forEach(function (moduleToBundle) {
|
||||
var info = entryPoints[moduleToBundle];
|
||||
var rootNodes = [moduleToBundle].concat(info.include || []);
|
||||
var allDependencies = visit(rootNodes, modulesGraph);
|
||||
var excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
|
||||
excludes.forEach(function (excludeRoot) {
|
||||
var allExcludes = visit([excludeRoot], modulesGraph);
|
||||
Object.keys(allExcludes).forEach(function (exclude) {
|
||||
Object.keys(entryPoints).forEach((moduleToBundle) => {
|
||||
const info = entryPoints[moduleToBundle];
|
||||
const rootNodes = [moduleToBundle].concat(info.include || []);
|
||||
const allDependencies = visit(rootNodes, modulesGraph);
|
||||
const excludes = ['require', 'exports', 'module'].concat(info.exclude || []);
|
||||
excludes.forEach((excludeRoot) => {
|
||||
const allExcludes = visit([excludeRoot], modulesGraph);
|
||||
Object.keys(allExcludes).forEach((exclude) => {
|
||||
delete allDependencies[exclude];
|
||||
});
|
||||
});
|
||||
var includedModules = sortedModules.filter(function (module) {
|
||||
const includedModules = sortedModules.filter((module) => {
|
||||
return allDependencies[module];
|
||||
});
|
||||
bundleData.bundles[moduleToBundle] = includedModules;
|
||||
var res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend, info.append, info.dest);
|
||||
const res = emitEntryPoint(modulesMap, modulesGraph, moduleToBundle, includedModules, info.prepend || [], info.append || [], info.dest);
|
||||
result = result.concat(res.files);
|
||||
for (var pluginName in res.usedPlugins) {
|
||||
for (const pluginName in res.usedPlugins) {
|
||||
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
|
||||
}
|
||||
});
|
||||
Object.keys(usedPlugins).forEach(function (pluginName) {
|
||||
var plugin = usedPlugins[pluginName];
|
||||
Object.keys(usedPlugins).forEach((pluginName) => {
|
||||
const plugin = usedPlugins[pluginName];
|
||||
if (typeof plugin.finishBuild === 'function') {
|
||||
var write = function (filename, contents) {
|
||||
const write = (filename, contents) => {
|
||||
result.push({
|
||||
dest: filename,
|
||||
sources: [{
|
||||
@@ -124,16 +128,16 @@ function emitEntryPoints(modules, entryPoints) {
|
||||
};
|
||||
}
|
||||
function extractStrings(destFiles) {
|
||||
var parseDefineCall = function (moduleMatch, depsMatch) {
|
||||
var module = moduleMatch.replace(/^"|"$/g, '');
|
||||
var deps = depsMatch.split(',');
|
||||
deps = deps.map(function (dep) {
|
||||
const parseDefineCall = (moduleMatch, depsMatch) => {
|
||||
const module = moduleMatch.replace(/^"|"$/g, '');
|
||||
let deps = depsMatch.split(',');
|
||||
deps = deps.map((dep) => {
|
||||
dep = dep.trim();
|
||||
dep = dep.replace(/^"|"$/g, '');
|
||||
dep = dep.replace(/^'|'$/g, '');
|
||||
var prefix = null;
|
||||
var _path = null;
|
||||
var pieces = dep.split('!');
|
||||
let prefix = null;
|
||||
let _path = null;
|
||||
const pieces = dep.split('!');
|
||||
if (pieces.length > 1) {
|
||||
prefix = pieces[0] + '!';
|
||||
_path = pieces[1];
|
||||
@@ -143,7 +147,7 @@ function extractStrings(destFiles) {
|
||||
_path = pieces[0];
|
||||
}
|
||||
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
|
||||
var res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
||||
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
||||
return prefix + res;
|
||||
}
|
||||
return prefix + _path;
|
||||
@@ -153,7 +157,7 @@ function extractStrings(destFiles) {
|
||||
deps: deps
|
||||
};
|
||||
};
|
||||
destFiles.forEach(function (destFile, index) {
|
||||
destFiles.forEach((destFile) => {
|
||||
if (!/\.js$/.test(destFile.dest)) {
|
||||
return;
|
||||
}
|
||||
@@ -161,44 +165,44 @@ function extractStrings(destFiles) {
|
||||
return;
|
||||
}
|
||||
// Do one pass to record the usage counts for each module id
|
||||
var useCounts = {};
|
||||
destFile.sources.forEach(function (source) {
|
||||
var matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
||||
const useCounts = {};
|
||||
destFile.sources.forEach((source) => {
|
||||
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
||||
if (!matches) {
|
||||
return;
|
||||
}
|
||||
var defineCall = parseDefineCall(matches[1], matches[2]);
|
||||
const defineCall = parseDefineCall(matches[1], matches[2]);
|
||||
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
||||
defineCall.deps.forEach(function (dep) {
|
||||
defineCall.deps.forEach((dep) => {
|
||||
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
||||
});
|
||||
});
|
||||
var sortedByUseModules = Object.keys(useCounts);
|
||||
sortedByUseModules.sort(function (a, b) {
|
||||
const sortedByUseModules = Object.keys(useCounts);
|
||||
sortedByUseModules.sort((a, b) => {
|
||||
return useCounts[b] - useCounts[a];
|
||||
});
|
||||
var replacementMap = {};
|
||||
sortedByUseModules.forEach(function (module, index) {
|
||||
const replacementMap = {};
|
||||
sortedByUseModules.forEach((module, index) => {
|
||||
replacementMap[module] = index;
|
||||
});
|
||||
destFile.sources.forEach(function (source) {
|
||||
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, function (_, moduleMatch, depsMatch) {
|
||||
var defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||
return "define(__m[" + replacementMap[defineCall.module] + "/*" + defineCall.module + "*/], __M([" + defineCall.deps.map(function (dep) { return replacementMap[dep] + '/*' + dep + '*/'; }).join(',') + "])";
|
||||
destFile.sources.forEach((source) => {
|
||||
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
|
||||
const defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
|
||||
});
|
||||
});
|
||||
destFile.sources.unshift({
|
||||
path: null,
|
||||
contents: [
|
||||
'(function() {',
|
||||
"var __m = " + JSON.stringify(sortedByUseModules) + ";",
|
||||
"var __M = function(deps) {",
|
||||
" var result = [];",
|
||||
" for (var i = 0, len = deps.length; i < len; i++) {",
|
||||
" result[i] = __m[deps[i]];",
|
||||
" }",
|
||||
" return result;",
|
||||
"};"
|
||||
`var __m = ${JSON.stringify(sortedByUseModules)};`,
|
||||
`var __M = function(deps) {`,
|
||||
` var result = [];`,
|
||||
` for (var i = 0, len = deps.length; i < len; i++) {`,
|
||||
` result[i] = __m[deps[i]];`,
|
||||
` }`,
|
||||
` return result;`,
|
||||
`};`
|
||||
].join('\n')
|
||||
});
|
||||
destFile.sources.push({
|
||||
@@ -210,7 +214,7 @@ function extractStrings(destFiles) {
|
||||
}
|
||||
function removeDuplicateTSBoilerplate(destFiles) {
|
||||
// Taken from typescript compiler => emitFiles
|
||||
var BOILERPLATE = [
|
||||
const BOILERPLATE = [
|
||||
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
||||
{ start: /^var __assign/, end: /^};$/ },
|
||||
{ start: /^var __decorate/, end: /^};$/ },
|
||||
@@ -219,14 +223,14 @@ function removeDuplicateTSBoilerplate(destFiles) {
|
||||
{ start: /^var __awaiter/, end: /^};$/ },
|
||||
{ start: /^var __generator/, end: /^};$/ },
|
||||
];
|
||||
destFiles.forEach(function (destFile) {
|
||||
var SEEN_BOILERPLATE = [];
|
||||
destFile.sources.forEach(function (source) {
|
||||
var lines = source.contents.split(/\r\n|\n|\r/);
|
||||
var newLines = [];
|
||||
var IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
|
||||
for (var i = 0; i < lines.length; i++) {
|
||||
var line = lines[i];
|
||||
destFiles.forEach((destFile) => {
|
||||
const SEEN_BOILERPLATE = [];
|
||||
destFile.sources.forEach((source) => {
|
||||
const lines = source.contents.split(/\r\n|\n|\r/);
|
||||
const newLines = [];
|
||||
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE;
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
const line = lines[i];
|
||||
if (IS_REMOVING_BOILERPLATE) {
|
||||
newLines.push('');
|
||||
if (END_BOILERPLATE.test(line)) {
|
||||
@@ -234,8 +238,8 @@ function removeDuplicateTSBoilerplate(destFiles) {
|
||||
}
|
||||
}
|
||||
else {
|
||||
for (var j = 0; j < BOILERPLATE.length; j++) {
|
||||
var boilerplate = BOILERPLATE[j];
|
||||
for (let j = 0; j < BOILERPLATE.length; j++) {
|
||||
const boilerplate = BOILERPLATE[j];
|
||||
if (boilerplate.start.test(line)) {
|
||||
if (SEEN_BOILERPLATE[j]) {
|
||||
IS_REMOVING_BOILERPLATE = true;
|
||||
@@ -263,45 +267,45 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
|
||||
if (!dest) {
|
||||
dest = entryPoint + '.js';
|
||||
}
|
||||
var mainResult = {
|
||||
const mainResult = {
|
||||
sources: [],
|
||||
dest: dest
|
||||
}, results = [mainResult];
|
||||
var usedPlugins = {};
|
||||
var getLoaderPlugin = function (pluginName) {
|
||||
const usedPlugins = {};
|
||||
const getLoaderPlugin = (pluginName) => {
|
||||
if (!usedPlugins[pluginName]) {
|
||||
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
||||
}
|
||||
return usedPlugins[pluginName];
|
||||
};
|
||||
includedModules.forEach(function (c) {
|
||||
var bangIndex = c.indexOf('!');
|
||||
includedModules.forEach((c) => {
|
||||
const bangIndex = c.indexOf('!');
|
||||
if (bangIndex >= 0) {
|
||||
var pluginName = c.substr(0, bangIndex);
|
||||
var plugin = getLoaderPlugin(pluginName);
|
||||
const pluginName = c.substr(0, bangIndex);
|
||||
const plugin = getLoaderPlugin(pluginName);
|
||||
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
||||
return;
|
||||
}
|
||||
var module = modulesMap[c];
|
||||
const module = modulesMap[c];
|
||||
if (module.path === 'empty:') {
|
||||
return;
|
||||
}
|
||||
var contents = readFileAndRemoveBOM(module.path);
|
||||
const contents = readFileAndRemoveBOM(module.path);
|
||||
if (module.shim) {
|
||||
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
||||
}
|
||||
else {
|
||||
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
|
||||
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
|
||||
}
|
||||
});
|
||||
Object.keys(usedPlugins).forEach(function (pluginName) {
|
||||
var plugin = usedPlugins[pluginName];
|
||||
Object.keys(usedPlugins).forEach((pluginName) => {
|
||||
const plugin = usedPlugins[pluginName];
|
||||
if (typeof plugin.writeFile === 'function') {
|
||||
var req = (function () {
|
||||
const req = (() => {
|
||||
throw new Error('no-no!');
|
||||
});
|
||||
req.toUrl = function (something) { return something; };
|
||||
var write = function (filename, contents) {
|
||||
req.toUrl = something => something;
|
||||
const write = (filename, contents) => {
|
||||
results.push({
|
||||
dest: filename,
|
||||
sources: [{
|
||||
@@ -313,15 +317,15 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
|
||||
plugin.writeFile(pluginName, entryPoint, req, write, {});
|
||||
}
|
||||
});
|
||||
var toIFile = function (path) {
|
||||
var contents = readFileAndRemoveBOM(path);
|
||||
const toIFile = (path) => {
|
||||
const contents = readFileAndRemoveBOM(path);
|
||||
return {
|
||||
path: path,
|
||||
contents: contents
|
||||
};
|
||||
};
|
||||
var toPrepend = (prepend || []).map(toIFile);
|
||||
var toAppend = (append || []).map(toIFile);
|
||||
const toPrepend = (prepend || []).map(toIFile);
|
||||
const toAppend = (append || []).map(toIFile);
|
||||
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
||||
return {
|
||||
files: results,
|
||||
@@ -329,8 +333,8 @@ function emitEntryPoint(modulesMap, deps, entryPoint, includedModules, prepend,
|
||||
};
|
||||
}
|
||||
function readFileAndRemoveBOM(path) {
|
||||
var BOM_CHAR_CODE = 65279;
|
||||
var contents = fs.readFileSync(path, 'utf8');
|
||||
const BOM_CHAR_CODE = 65279;
|
||||
let contents = fs.readFileSync(path, 'utf8');
|
||||
// Remove BOM
|
||||
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
||||
contents = contents.substring(1);
|
||||
@@ -338,15 +342,15 @@ function readFileAndRemoveBOM(path) {
|
||||
return contents;
|
||||
}
|
||||
function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
|
||||
var result = '';
|
||||
let result = '';
|
||||
if (typeof plugin.write === 'function') {
|
||||
var write = (function (what) {
|
||||
const write = ((what) => {
|
||||
result += what;
|
||||
});
|
||||
write.getEntryPoint = function () {
|
||||
write.getEntryPoint = () => {
|
||||
return entryPoint;
|
||||
};
|
||||
write.asModule = function (moduleId, code) {
|
||||
write.asModule = (moduleId, code) => {
|
||||
code = code.replace(/^define\(/, 'define("' + moduleId + '",');
|
||||
result += code;
|
||||
};
|
||||
@@ -357,20 +361,20 @@ function emitPlugin(entryPoint, plugin, pluginName, moduleName) {
|
||||
contents: result
|
||||
};
|
||||
}
|
||||
function emitNamedModule(moduleId, myDeps, defineCallPosition, path, contents) {
|
||||
function emitNamedModule(moduleId, defineCallPosition, path, contents) {
|
||||
// `defineCallPosition` is the position in code: |define()
|
||||
var defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||
// `parensOffset` is the position in code: define|()
|
||||
var parensOffset = contents.indexOf('(', defineCallOffset);
|
||||
var insertStr = '"' + moduleId + '", ';
|
||||
const parensOffset = contents.indexOf('(', defineCallOffset);
|
||||
const insertStr = '"' + moduleId + '", ';
|
||||
return {
|
||||
path: path,
|
||||
contents: contents.substr(0, parensOffset + 1) + insertStr + contents.substr(parensOffset + 1)
|
||||
};
|
||||
}
|
||||
function emitShimmedModule(moduleId, myDeps, factory, path, contents) {
|
||||
var strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||
var strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||
return {
|
||||
path: path,
|
||||
contents: contents + '\n;\n' + strDefine
|
||||
@@ -383,7 +387,8 @@ function positionToOffset(str, desiredLine, desiredCol) {
|
||||
if (desiredLine === 1) {
|
||||
return desiredCol - 1;
|
||||
}
|
||||
var line = 1, lastNewLineOffset = -1;
|
||||
let line = 1;
|
||||
let lastNewLineOffset = -1;
|
||||
do {
|
||||
if (desiredLine === line) {
|
||||
return lastNewLineOffset + 1 + desiredCol - 1;
|
||||
@@ -397,14 +402,15 @@ function positionToOffset(str, desiredLine, desiredCol) {
|
||||
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
||||
*/
|
||||
function visit(rootNodes, graph) {
|
||||
var result = {}, queue = rootNodes;
|
||||
rootNodes.forEach(function (node) {
|
||||
const result = {};
|
||||
const queue = rootNodes;
|
||||
rootNodes.forEach((node) => {
|
||||
result[node] = true;
|
||||
});
|
||||
while (queue.length > 0) {
|
||||
var el = queue.shift();
|
||||
var myEdges = graph[el] || [];
|
||||
myEdges.forEach(function (toNode) {
|
||||
const el = queue.shift();
|
||||
const myEdges = graph[el] || [];
|
||||
myEdges.forEach((toNode) => {
|
||||
if (!result[toNode]) {
|
||||
result[toNode] = true;
|
||||
queue.push(toNode);
|
||||
@@ -417,11 +423,11 @@ function visit(rootNodes, graph) {
|
||||
* Perform a topological sort on `graph`
|
||||
*/
|
||||
function topologicalSort(graph) {
|
||||
var allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
|
||||
Object.keys(graph).forEach(function (fromNode) {
|
||||
const allNodes = {}, outgoingEdgeCount = {}, inverseEdges = {};
|
||||
Object.keys(graph).forEach((fromNode) => {
|
||||
allNodes[fromNode] = true;
|
||||
outgoingEdgeCount[fromNode] = graph[fromNode].length;
|
||||
graph[fromNode].forEach(function (toNode) {
|
||||
graph[fromNode].forEach((toNode) => {
|
||||
allNodes[toNode] = true;
|
||||
outgoingEdgeCount[toNode] = outgoingEdgeCount[toNode] || 0;
|
||||
inverseEdges[toNode] = inverseEdges[toNode] || [];
|
||||
@@ -429,8 +435,8 @@ function topologicalSort(graph) {
|
||||
});
|
||||
});
|
||||
// https://en.wikipedia.org/wiki/Topological_sorting
|
||||
var S = [], L = [];
|
||||
Object.keys(allNodes).forEach(function (node) {
|
||||
const S = [], L = [];
|
||||
Object.keys(allNodes).forEach((node) => {
|
||||
if (outgoingEdgeCount[node] === 0) {
|
||||
delete outgoingEdgeCount[node];
|
||||
S.push(node);
|
||||
@@ -439,10 +445,10 @@ function topologicalSort(graph) {
|
||||
while (S.length > 0) {
|
||||
// Ensure the exact same order all the time with the same inputs
|
||||
S.sort();
|
||||
var n = S.shift();
|
||||
const n = S.shift();
|
||||
L.push(n);
|
||||
var myInverseEdges = inverseEdges[n] || [];
|
||||
myInverseEdges.forEach(function (m) {
|
||||
const myInverseEdges = inverseEdges[n] || [];
|
||||
myInverseEdges.forEach((m) => {
|
||||
outgoingEdgeCount[m]--;
|
||||
if (outgoingEdgeCount[m] === 0) {
|
||||
delete outgoingEdgeCount[m];
|
||||
|
||||
@@ -3,9 +3,9 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import fs = require('fs');
|
||||
import path = require('path');
|
||||
import vm = require('vm');
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
import * as vm from 'vm';
|
||||
|
||||
interface IPosition {
|
||||
line: number;
|
||||
@@ -46,7 +46,7 @@ export interface IEntryPoint {
|
||||
name: string;
|
||||
include?: string[];
|
||||
exclude?: string[];
|
||||
prepend: string[];
|
||||
prepend?: string[];
|
||||
append?: string[];
|
||||
dest?: string;
|
||||
}
|
||||
@@ -64,7 +64,7 @@ interface INodeSet {
|
||||
}
|
||||
|
||||
export interface IFile {
|
||||
path: string;
|
||||
path: string | null;
|
||||
contents: string;
|
||||
}
|
||||
|
||||
@@ -97,13 +97,13 @@ export interface ILoaderConfig {
|
||||
/**
|
||||
* Bundle `entryPoints` given config `config`.
|
||||
*/
|
||||
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult) => void): void {
|
||||
let entryPointsMap: IEntryPointMap = {};
|
||||
export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callback: (err: any, result: IBundleResult | null) => void): void {
|
||||
const entryPointsMap: IEntryPointMap = {};
|
||||
entryPoints.forEach((module: IEntryPoint) => {
|
||||
entryPointsMap[module.name] = module;
|
||||
});
|
||||
|
||||
let allMentionedModulesMap: { [modules: string]: boolean; } = {};
|
||||
const allMentionedModulesMap: { [modules: string]: boolean; } = {};
|
||||
entryPoints.forEach((module: IEntryPoint) => {
|
||||
allMentionedModulesMap[module.name] = true;
|
||||
(module.include || []).forEach(function (includedModule) {
|
||||
@@ -115,28 +115,32 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
|
||||
});
|
||||
|
||||
|
||||
var code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
||||
var r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
||||
var loaderModule = { exports: {} };
|
||||
const code = require('fs').readFileSync(path.join(__dirname, '../../src/vs/loader.js'));
|
||||
const r: Function = <any>vm.runInThisContext('(function(require, module, exports) { ' + code + '\n});');
|
||||
const loaderModule = { exports: {} };
|
||||
r.call({}, require, loaderModule, loaderModule.exports);
|
||||
|
||||
var loader: any = loaderModule.exports;
|
||||
const loader: any = loaderModule.exports;
|
||||
config.isBuild = true;
|
||||
config.paths = config.paths || {};
|
||||
if (!config.paths['vs/nls']) {
|
||||
config.paths['vs/nls'] = 'out-build/vs/nls.build';
|
||||
}
|
||||
if (!config.paths['vs/css']) {
|
||||
config.paths['vs/css'] = 'out-build/vs/css.build';
|
||||
}
|
||||
loader.config(config);
|
||||
|
||||
loader(['require'], (localRequire) => {
|
||||
let resolvePath = (path: string) => {
|
||||
let r = localRequire.toUrl(path);
|
||||
loader(['require'], (localRequire: any) => {
|
||||
const resolvePath = (path: string) => {
|
||||
const r = localRequire.toUrl(path);
|
||||
if (!/\.js/.test(r)) {
|
||||
return r + '.js';
|
||||
}
|
||||
return r;
|
||||
};
|
||||
for (let moduleId in entryPointsMap) {
|
||||
let entryPoint = entryPointsMap[moduleId];
|
||||
for (const moduleId in entryPointsMap) {
|
||||
const entryPoint = entryPointsMap[moduleId];
|
||||
if (entryPoint.append) {
|
||||
entryPoint.append = entryPoint.append.map(resolvePath);
|
||||
}
|
||||
@@ -147,76 +151,76 @@ export function bundle(entryPoints: IEntryPoint[], config: ILoaderConfig, callba
|
||||
});
|
||||
|
||||
loader(Object.keys(allMentionedModulesMap), () => {
|
||||
let modules = <IBuildModuleInfo[]>loader.getBuildInfo();
|
||||
let partialResult = emitEntryPoints(modules, entryPointsMap);
|
||||
let cssInlinedResources = loader('vs/css').getInlinedResources();
|
||||
const modules = <IBuildModuleInfo[]>loader.getBuildInfo();
|
||||
const partialResult = emitEntryPoints(modules, entryPointsMap);
|
||||
const cssInlinedResources = loader('vs/css').getInlinedResources();
|
||||
callback(null, {
|
||||
files: partialResult.files,
|
||||
cssInlinedResources: cssInlinedResources,
|
||||
bundleData: partialResult.bundleData
|
||||
});
|
||||
}, (err) => callback(err, null));
|
||||
}, (err: any) => callback(err, null));
|
||||
}
|
||||
|
||||
function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMap): IPartialBundleResult {
|
||||
let modulesMap: IBuildModuleInfoMap = {};
|
||||
const modulesMap: IBuildModuleInfoMap = {};
|
||||
modules.forEach((m: IBuildModuleInfo) => {
|
||||
modulesMap[m.id] = m;
|
||||
});
|
||||
|
||||
let modulesGraph: IGraph = {};
|
||||
const modulesGraph: IGraph = {};
|
||||
modules.forEach((m: IBuildModuleInfo) => {
|
||||
modulesGraph[m.id] = m.dependencies;
|
||||
});
|
||||
|
||||
let sortedModules = topologicalSort(modulesGraph);
|
||||
const sortedModules = topologicalSort(modulesGraph);
|
||||
|
||||
let result: IConcatFile[] = [];
|
||||
let usedPlugins: IPluginMap = {};
|
||||
let bundleData: IBundleData = {
|
||||
const usedPlugins: IPluginMap = {};
|
||||
const bundleData: IBundleData = {
|
||||
graph: modulesGraph,
|
||||
bundles: {}
|
||||
};
|
||||
|
||||
Object.keys(entryPoints).forEach((moduleToBundle: string) => {
|
||||
let info = entryPoints[moduleToBundle];
|
||||
let rootNodes = [moduleToBundle].concat(info.include || []);
|
||||
let allDependencies = visit(rootNodes, modulesGraph);
|
||||
let excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
|
||||
const info = entryPoints[moduleToBundle];
|
||||
const rootNodes = [moduleToBundle].concat(info.include || []);
|
||||
const allDependencies = visit(rootNodes, modulesGraph);
|
||||
const excludes: string[] = ['require', 'exports', 'module'].concat(info.exclude || []);
|
||||
|
||||
excludes.forEach((excludeRoot: string) => {
|
||||
let allExcludes = visit([excludeRoot], modulesGraph);
|
||||
const allExcludes = visit([excludeRoot], modulesGraph);
|
||||
Object.keys(allExcludes).forEach((exclude: string) => {
|
||||
delete allDependencies[exclude];
|
||||
});
|
||||
});
|
||||
|
||||
let includedModules = sortedModules.filter((module: string) => {
|
||||
const includedModules = sortedModules.filter((module: string) => {
|
||||
return allDependencies[module];
|
||||
});
|
||||
|
||||
bundleData.bundles[moduleToBundle] = includedModules;
|
||||
|
||||
let res = emitEntryPoint(
|
||||
const res = emitEntryPoint(
|
||||
modulesMap,
|
||||
modulesGraph,
|
||||
moduleToBundle,
|
||||
includedModules,
|
||||
info.prepend,
|
||||
info.append,
|
||||
info.prepend || [],
|
||||
info.append || [],
|
||||
info.dest
|
||||
);
|
||||
|
||||
result = result.concat(res.files);
|
||||
for (let pluginName in res.usedPlugins) {
|
||||
for (const pluginName in res.usedPlugins) {
|
||||
usedPlugins[pluginName] = usedPlugins[pluginName] || res.usedPlugins[pluginName];
|
||||
}
|
||||
});
|
||||
|
||||
Object.keys(usedPlugins).forEach((pluginName: string) => {
|
||||
let plugin = usedPlugins[pluginName];
|
||||
const plugin = usedPlugins[pluginName];
|
||||
if (typeof plugin.finishBuild === 'function') {
|
||||
let write = (filename: string, contents: string) => {
|
||||
const write = (filename: string, contents: string) => {
|
||||
result.push({
|
||||
dest: filename,
|
||||
sources: [{
|
||||
@@ -237,16 +241,16 @@ function emitEntryPoints(modules: IBuildModuleInfo[], entryPoints: IEntryPointMa
|
||||
}
|
||||
|
||||
function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
let parseDefineCall = (moduleMatch: string, depsMatch: string) => {
|
||||
let module = moduleMatch.replace(/^"|"$/g, '');
|
||||
const parseDefineCall = (moduleMatch: string, depsMatch: string) => {
|
||||
const module = moduleMatch.replace(/^"|"$/g, '');
|
||||
let deps = depsMatch.split(',');
|
||||
deps = deps.map((dep) => {
|
||||
dep = dep.trim();
|
||||
dep = dep.replace(/^"|"$/g, '');
|
||||
dep = dep.replace(/^'|'$/g, '');
|
||||
let prefix: string = null;
|
||||
let _path: string = null;
|
||||
let pieces = dep.split('!');
|
||||
let prefix: string | null = null;
|
||||
let _path: string | null = null;
|
||||
const pieces = dep.split('!');
|
||||
if (pieces.length > 1) {
|
||||
prefix = pieces[0] + '!';
|
||||
_path = pieces[1];
|
||||
@@ -256,7 +260,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
}
|
||||
|
||||
if (/^\.\//.test(_path) || /^\.\.\//.test(_path)) {
|
||||
let res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
||||
const res = path.join(path.dirname(module), _path).replace(/\\/g, '/');
|
||||
return prefix + res;
|
||||
}
|
||||
return prefix + _path;
|
||||
@@ -267,7 +271,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
};
|
||||
};
|
||||
|
||||
destFiles.forEach((destFile, index) => {
|
||||
destFiles.forEach((destFile) => {
|
||||
if (!/\.js$/.test(destFile.dest)) {
|
||||
return;
|
||||
}
|
||||
@@ -276,33 +280,33 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
}
|
||||
|
||||
// Do one pass to record the usage counts for each module id
|
||||
let useCounts: { [moduleId: string]: number; } = {};
|
||||
const useCounts: { [moduleId: string]: number; } = {};
|
||||
destFile.sources.forEach((source) => {
|
||||
let matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
||||
const matches = source.contents.match(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/);
|
||||
if (!matches) {
|
||||
return;
|
||||
}
|
||||
|
||||
let defineCall = parseDefineCall(matches[1], matches[2]);
|
||||
const defineCall = parseDefineCall(matches[1], matches[2]);
|
||||
useCounts[defineCall.module] = (useCounts[defineCall.module] || 0) + 1;
|
||||
defineCall.deps.forEach((dep) => {
|
||||
useCounts[dep] = (useCounts[dep] || 0) + 1;
|
||||
});
|
||||
});
|
||||
|
||||
let sortedByUseModules = Object.keys(useCounts);
|
||||
const sortedByUseModules = Object.keys(useCounts);
|
||||
sortedByUseModules.sort((a, b) => {
|
||||
return useCounts[b] - useCounts[a];
|
||||
});
|
||||
|
||||
let replacementMap: { [moduleId: string]: number; } = {};
|
||||
const replacementMap: { [moduleId: string]: number; } = {};
|
||||
sortedByUseModules.forEach((module, index) => {
|
||||
replacementMap[module] = index;
|
||||
});
|
||||
|
||||
destFile.sources.forEach((source) => {
|
||||
source.contents = source.contents.replace(/define\(("[^"]+"),\s*\[(((, )?("|')[^"']+("|'))+)\]/, (_, moduleMatch, depsMatch) => {
|
||||
let defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||
const defineCall = parseDefineCall(moduleMatch, depsMatch);
|
||||
return `define(__m[${replacementMap[defineCall.module]}/*${defineCall.module}*/], __M([${defineCall.deps.map(dep => replacementMap[dep] + '/*' + dep + '*/').join(',')}])`;
|
||||
});
|
||||
});
|
||||
@@ -332,7 +336,7 @@ function extractStrings(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
|
||||
function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
// Taken from typescript compiler => emitFiles
|
||||
let BOILERPLATE = [
|
||||
const BOILERPLATE = [
|
||||
{ start: /^var __extends/, end: /^}\)\(\);$/ },
|
||||
{ start: /^var __assign/, end: /^};$/ },
|
||||
{ start: /^var __decorate/, end: /^};$/ },
|
||||
@@ -343,22 +347,22 @@ function removeDuplicateTSBoilerplate(destFiles: IConcatFile[]): IConcatFile[] {
|
||||
];
|
||||
|
||||
destFiles.forEach((destFile) => {
|
||||
let SEEN_BOILERPLATE = [];
|
||||
const SEEN_BOILERPLATE: boolean[] = [];
|
||||
destFile.sources.forEach((source) => {
|
||||
let lines = source.contents.split(/\r\n|\n|\r/);
|
||||
let newLines: string[] = [];
|
||||
const lines = source.contents.split(/\r\n|\n|\r/);
|
||||
const newLines: string[] = [];
|
||||
let IS_REMOVING_BOILERPLATE = false, END_BOILERPLATE: RegExp;
|
||||
|
||||
for (let i = 0; i < lines.length; i++) {
|
||||
let line = lines[i];
|
||||
const line = lines[i];
|
||||
if (IS_REMOVING_BOILERPLATE) {
|
||||
newLines.push('');
|
||||
if (END_BOILERPLATE.test(line)) {
|
||||
if (END_BOILERPLATE!.test(line)) {
|
||||
IS_REMOVING_BOILERPLATE = false;
|
||||
}
|
||||
} else {
|
||||
for (let j = 0; j < BOILERPLATE.length; j++) {
|
||||
let boilerplate = BOILERPLATE[j];
|
||||
const boilerplate = BOILERPLATE[j];
|
||||
if (boilerplate.start.test(line)) {
|
||||
if (SEEN_BOILERPLATE[j]) {
|
||||
IS_REMOVING_BOILERPLATE = true;
|
||||
@@ -398,19 +402,19 @@ function emitEntryPoint(
|
||||
includedModules: string[],
|
||||
prepend: string[],
|
||||
append: string[],
|
||||
dest: string
|
||||
dest: string | undefined
|
||||
): IEmitEntryPointResult {
|
||||
if (!dest) {
|
||||
dest = entryPoint + '.js';
|
||||
}
|
||||
let mainResult: IConcatFile = {
|
||||
const mainResult: IConcatFile = {
|
||||
sources: [],
|
||||
dest: dest
|
||||
},
|
||||
results: IConcatFile[] = [mainResult];
|
||||
|
||||
let usedPlugins: IPluginMap = {};
|
||||
let getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
|
||||
const usedPlugins: IPluginMap = {};
|
||||
const getLoaderPlugin = (pluginName: string): ILoaderPlugin => {
|
||||
if (!usedPlugins[pluginName]) {
|
||||
usedPlugins[pluginName] = modulesMap[pluginName].exports;
|
||||
}
|
||||
@@ -418,39 +422,39 @@ function emitEntryPoint(
|
||||
};
|
||||
|
||||
includedModules.forEach((c: string) => {
|
||||
let bangIndex = c.indexOf('!');
|
||||
const bangIndex = c.indexOf('!');
|
||||
|
||||
if (bangIndex >= 0) {
|
||||
let pluginName = c.substr(0, bangIndex);
|
||||
let plugin = getLoaderPlugin(pluginName);
|
||||
const pluginName = c.substr(0, bangIndex);
|
||||
const plugin = getLoaderPlugin(pluginName);
|
||||
mainResult.sources.push(emitPlugin(entryPoint, plugin, pluginName, c.substr(bangIndex + 1)));
|
||||
return;
|
||||
}
|
||||
|
||||
let module = modulesMap[c];
|
||||
const module = modulesMap[c];
|
||||
|
||||
if (module.path === 'empty:') {
|
||||
return;
|
||||
}
|
||||
|
||||
let contents = readFileAndRemoveBOM(module.path);
|
||||
const contents = readFileAndRemoveBOM(module.path);
|
||||
|
||||
if (module.shim) {
|
||||
mainResult.sources.push(emitShimmedModule(c, deps[c], module.shim, module.path, contents));
|
||||
} else {
|
||||
mainResult.sources.push(emitNamedModule(c, deps[c], module.defineLocation, module.path, contents));
|
||||
mainResult.sources.push(emitNamedModule(c, module.defineLocation, module.path, contents));
|
||||
}
|
||||
});
|
||||
|
||||
Object.keys(usedPlugins).forEach((pluginName: string) => {
|
||||
let plugin = usedPlugins[pluginName];
|
||||
const plugin = usedPlugins[pluginName];
|
||||
if (typeof plugin.writeFile === 'function') {
|
||||
let req: ILoaderPluginReqFunc = <any>(() => {
|
||||
const req: ILoaderPluginReqFunc = <any>(() => {
|
||||
throw new Error('no-no!');
|
||||
});
|
||||
req.toUrl = something => something;
|
||||
|
||||
let write = (filename: string, contents: string) => {
|
||||
const write = (filename: string, contents: string) => {
|
||||
results.push({
|
||||
dest: filename,
|
||||
sources: [{
|
||||
@@ -463,16 +467,16 @@ function emitEntryPoint(
|
||||
}
|
||||
});
|
||||
|
||||
let toIFile = (path): IFile => {
|
||||
let contents = readFileAndRemoveBOM(path);
|
||||
const toIFile = (path: string): IFile => {
|
||||
const contents = readFileAndRemoveBOM(path);
|
||||
return {
|
||||
path: path,
|
||||
contents: contents
|
||||
};
|
||||
};
|
||||
|
||||
let toPrepend = (prepend || []).map(toIFile);
|
||||
let toAppend = (append || []).map(toIFile);
|
||||
const toPrepend = (prepend || []).map(toIFile);
|
||||
const toAppend = (append || []).map(toIFile);
|
||||
|
||||
mainResult.sources = toPrepend.concat(mainResult.sources).concat(toAppend);
|
||||
|
||||
@@ -483,8 +487,8 @@ function emitEntryPoint(
|
||||
}
|
||||
|
||||
function readFileAndRemoveBOM(path: string): string {
|
||||
var BOM_CHAR_CODE = 65279;
|
||||
var contents = fs.readFileSync(path, 'utf8');
|
||||
const BOM_CHAR_CODE = 65279;
|
||||
let contents = fs.readFileSync(path, 'utf8');
|
||||
// Remove BOM
|
||||
if (contents.charCodeAt(0) === BOM_CHAR_CODE) {
|
||||
contents = contents.substring(1);
|
||||
@@ -495,7 +499,7 @@ function readFileAndRemoveBOM(path: string): string {
|
||||
function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: string, moduleName: string): IFile {
|
||||
let result = '';
|
||||
if (typeof plugin.write === 'function') {
|
||||
let write: ILoaderPluginWriteFunc = <any>((what) => {
|
||||
const write: ILoaderPluginWriteFunc = <any>((what: string) => {
|
||||
result += what;
|
||||
});
|
||||
write.getEntryPoint = () => {
|
||||
@@ -513,15 +517,15 @@ function emitPlugin(entryPoint: string, plugin: ILoaderPlugin, pluginName: strin
|
||||
};
|
||||
}
|
||||
|
||||
function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition: IPosition, path: string, contents: string): IFile {
|
||||
function emitNamedModule(moduleId: string, defineCallPosition: IPosition, path: string, contents: string): IFile {
|
||||
|
||||
// `defineCallPosition` is the position in code: |define()
|
||||
let defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||
const defineCallOffset = positionToOffset(contents, defineCallPosition.line, defineCallPosition.col);
|
||||
|
||||
// `parensOffset` is the position in code: define|()
|
||||
let parensOffset = contents.indexOf('(', defineCallOffset);
|
||||
const parensOffset = contents.indexOf('(', defineCallOffset);
|
||||
|
||||
let insertStr = '"' + moduleId + '", ';
|
||||
const insertStr = '"' + moduleId + '", ';
|
||||
|
||||
return {
|
||||
path: path,
|
||||
@@ -530,8 +534,8 @@ function emitNamedModule(moduleId: string, myDeps: string[], defineCallPosition:
|
||||
}
|
||||
|
||||
function emitShimmedModule(moduleId: string, myDeps: string[], factory: string, path: string, contents: string): IFile {
|
||||
let strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||
let strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||
const strDeps = (myDeps.length > 0 ? '"' + myDeps.join('", "') + '"' : '');
|
||||
const strDefine = 'define("' + moduleId + '", [' + strDeps + '], ' + factory + ');';
|
||||
return {
|
||||
path: path,
|
||||
contents: contents + '\n;\n' + strDefine
|
||||
@@ -546,8 +550,8 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
|
||||
return desiredCol - 1;
|
||||
}
|
||||
|
||||
let line = 1,
|
||||
lastNewLineOffset = -1;
|
||||
let line = 1;
|
||||
let lastNewLineOffset = -1;
|
||||
|
||||
do {
|
||||
if (desiredLine === line) {
|
||||
@@ -565,16 +569,16 @@ function positionToOffset(str: string, desiredLine: number, desiredCol: number):
|
||||
* Return a set of reachable nodes in `graph` starting from `rootNodes`
|
||||
*/
|
||||
function visit(rootNodes: string[], graph: IGraph): INodeSet {
|
||||
let result: INodeSet = {},
|
||||
queue = rootNodes;
|
||||
const result: INodeSet = {};
|
||||
const queue = rootNodes;
|
||||
|
||||
rootNodes.forEach((node) => {
|
||||
result[node] = true;
|
||||
});
|
||||
|
||||
while (queue.length > 0) {
|
||||
let el = queue.shift();
|
||||
let myEdges = graph[el] || [];
|
||||
const el = queue.shift();
|
||||
const myEdges = graph[el!] || [];
|
||||
myEdges.forEach((toNode) => {
|
||||
if (!result[toNode]) {
|
||||
result[toNode] = true;
|
||||
@@ -591,7 +595,7 @@ function visit(rootNodes: string[], graph: IGraph): INodeSet {
|
||||
*/
|
||||
function topologicalSort(graph: IGraph): string[] {
|
||||
|
||||
let allNodes: INodeSet = {},
|
||||
const allNodes: INodeSet = {},
|
||||
outgoingEdgeCount: { [node: string]: number; } = {},
|
||||
inverseEdges: IGraph = {};
|
||||
|
||||
@@ -609,7 +613,7 @@ function topologicalSort(graph: IGraph): string[] {
|
||||
});
|
||||
|
||||
// https://en.wikipedia.org/wiki/Topological_sorting
|
||||
let S: string[] = [],
|
||||
const S: string[] = [],
|
||||
L: string[] = [];
|
||||
|
||||
Object.keys(allNodes).forEach((node: string) => {
|
||||
@@ -623,10 +627,10 @@ function topologicalSort(graph: IGraph): string[] {
|
||||
// Ensure the exact same order all the time with the same inputs
|
||||
S.sort();
|
||||
|
||||
let n: string = S.shift();
|
||||
const n: string = S.shift()!;
|
||||
L.push(n);
|
||||
|
||||
let myInverseEdges = inverseEdges[n] || [];
|
||||
const myInverseEdges = inverseEdges[n] || [];
|
||||
myInverseEdges.forEach((m: string) => {
|
||||
outgoingEdgeCount[m]--;
|
||||
if (outgoingEdgeCount[m] === 0) {
|
||||
|
||||
@@ -4,44 +4,54 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var gulp = require("gulp");
|
||||
var tsb = require("gulp-tsb");
|
||||
var es = require("event-stream");
|
||||
var watch = require('./watch');
|
||||
var nls = require("./nls");
|
||||
var util = require("./util");
|
||||
var reporter_1 = require("./reporter");
|
||||
var path = require("path");
|
||||
var bom = require("gulp-bom");
|
||||
var sourcemaps = require("gulp-sourcemaps");
|
||||
var _ = require("underscore");
|
||||
var monacodts = require("../monaco/api");
|
||||
var fs = require("fs");
|
||||
var reporter = reporter_1.createReporter();
|
||||
const es = require("event-stream");
|
||||
const fs = require("fs");
|
||||
const gulp = require("gulp");
|
||||
const bom = require("gulp-bom");
|
||||
const sourcemaps = require("gulp-sourcemaps");
|
||||
const tsb = require("gulp-tsb");
|
||||
const path = require("path");
|
||||
const _ = require("underscore");
|
||||
const monacodts = require("../monaco/api");
|
||||
const nls = require("./nls");
|
||||
const reporter_1 = require("./reporter");
|
||||
const util = require("./util");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const watch = require('./watch');
|
||||
const reporter = reporter_1.createReporter();
|
||||
function getTypeScriptCompilerOptions(src) {
|
||||
var rootDir = path.join(__dirname, "../../" + src);
|
||||
var options = require("../../" + src + "/tsconfig.json").compilerOptions;
|
||||
const rootDir = path.join(__dirname, `../../${src}`);
|
||||
const tsconfig = require(`../../${src}/tsconfig.json`);
|
||||
let options;
|
||||
if (tsconfig.extends) {
|
||||
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
|
||||
}
|
||||
else {
|
||||
options = tsconfig.compilerOptions;
|
||||
}
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.baseUrl = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||
return options;
|
||||
}
|
||||
function createCompile(src, build, emitError) {
|
||||
var opts = _.clone(getTypeScriptCompilerOptions(src));
|
||||
const opts = _.clone(getTypeScriptCompilerOptions(src));
|
||||
opts.inlineSources = !!build;
|
||||
opts.noFilesystemLookup = true;
|
||||
var ts = tsb.create(opts, null, null, function (err) { return reporter(err.toString()); });
|
||||
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
|
||||
return function (token) {
|
||||
var utf8Filter = util.filter(function (data) { return /(\/|\\)test(\/|\\).*utf8/.test(data.path); });
|
||||
var tsFilter = util.filter(function (data) { return /\.ts$/.test(data.path); });
|
||||
var noDeclarationsFilter = util.filter(function (data) { return !(/\.d\.ts$/.test(data.path)); });
|
||||
var input = es.through();
|
||||
var output = input
|
||||
const utf8Filter = util.filter(data => /(\/|\\)test(\/|\\).*utf8/.test(data.path));
|
||||
const tsFilter = util.filter(data => /\.ts$/.test(data.path));
|
||||
const noDeclarationsFilter = util.filter(data => !(/\.d\.ts$/.test(data.path)));
|
||||
const input = es.through();
|
||||
const output = input
|
||||
.pipe(utf8Filter)
|
||||
.pipe(bom())
|
||||
.pipe(utf8Filter.restore)
|
||||
@@ -57,91 +67,136 @@ function createCompile(src, build, emitError) {
|
||||
sourceRoot: opts.sourceRoot
|
||||
}))
|
||||
.pipe(tsFilter.restore)
|
||||
.pipe(reporter.end(emitError));
|
||||
.pipe(reporter.end(!!emitError));
|
||||
return es.duplex(input, output);
|
||||
};
|
||||
}
|
||||
const typesDts = [
|
||||
'node_modules/typescript/lib/*.d.ts',
|
||||
'node_modules/@types/**/*.d.ts',
|
||||
'!node_modules/@types/webpack/**/*',
|
||||
'!node_modules/@types/uglify-js/**/*',
|
||||
];
|
||||
function compileTask(src, out, build) {
|
||||
return function () {
|
||||
var compile = createCompile(src, build, true);
|
||||
var srcPipe = es.merge(gulp.src(src + "/**", { base: "" + src }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
|
||||
const compile = createCompile(src, build, true);
|
||||
const srcPipe = es.merge(gulp.src(`${src}/**`, { base: `${src}` }), gulp.src(typesDts));
|
||||
let generator = new MonacoGenerator(false);
|
||||
if (src === 'src') {
|
||||
generator.execute();
|
||||
}
|
||||
return srcPipe
|
||||
.pipe(generator.stream)
|
||||
.pipe(compile())
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
}
|
||||
exports.compileTask = compileTask;
|
||||
function watchTask(out, build) {
|
||||
return function () {
|
||||
var compile = createCompile('src', build);
|
||||
var src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src('node_modules/typescript/lib/lib.d.ts'));
|
||||
var watchSrc = watch('src/**', { base: 'src' });
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
var dtsFilter = util.filter(function (data) { return !/\.d\.ts$/.test(data.path); });
|
||||
const compile = createCompile('src', build);
|
||||
const src = es.merge(gulp.src('src/**', { base: 'src' }), gulp.src(typesDts));
|
||||
const watchSrc = watch('src/**', { base: 'src' });
|
||||
let generator = new MonacoGenerator(true);
|
||||
generator.execute();
|
||||
return watchSrc
|
||||
.pipe(generator.stream)
|
||||
.pipe(util.incremental(compile, src, true))
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, true));
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
}
|
||||
exports.watchTask = watchTask;
|
||||
function monacodtsTask(out, isWatch) {
|
||||
var basePath = path.resolve(process.cwd(), out);
|
||||
var neededFiles = {};
|
||||
monacodts.getFilesToWatch(out).forEach(function (filePath) {
|
||||
filePath = path.normalize(filePath);
|
||||
neededFiles[filePath] = true;
|
||||
});
|
||||
var inputFiles = {};
|
||||
for (var filePath in neededFiles) {
|
||||
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
|
||||
// This file is needed from source => simply read it now
|
||||
inputFiles[filePath] = fs.readFileSync(filePath).toString();
|
||||
}
|
||||
}
|
||||
var setInputFile = function (filePath, contents) {
|
||||
if (inputFiles[filePath] === contents) {
|
||||
// no change
|
||||
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
|
||||
class MonacoGenerator {
|
||||
constructor(isWatch) {
|
||||
this._executeSoonTimer = null;
|
||||
this._isWatch = isWatch;
|
||||
this.stream = es.through();
|
||||
this._watchers = [];
|
||||
this._watchedFiles = {};
|
||||
let onWillReadFile = (moduleId, filePath) => {
|
||||
if (!this._isWatch) {
|
||||
return;
|
||||
}
|
||||
inputFiles[filePath] = contents;
|
||||
var neededInputFilesCount = Object.keys(neededFiles).length;
|
||||
var availableInputFilesCount = Object.keys(inputFiles).length;
|
||||
if (neededInputFilesCount === availableInputFilesCount) {
|
||||
run();
|
||||
if (this._watchedFiles[filePath]) {
|
||||
return;
|
||||
}
|
||||
};
|
||||
var run = function () {
|
||||
var result = monacodts.run(out, inputFiles);
|
||||
if (!result.isTheSame) {
|
||||
if (isWatch) {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
}
|
||||
else {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||
}
|
||||
}
|
||||
};
|
||||
var resultStream;
|
||||
if (isWatch) {
|
||||
watch('build/monaco/*').pipe(es.through(function () {
|
||||
run();
|
||||
}));
|
||||
}
|
||||
resultStream = es.through(function (data) {
|
||||
var filePath = path.normalize(path.resolve(basePath, data.relative));
|
||||
if (neededFiles[filePath]) {
|
||||
setInputFile(filePath, data.contents.toString());
|
||||
}
|
||||
this.emit('data', data);
|
||||
this._watchedFiles[filePath] = true;
|
||||
const watcher = fs.watch(filePath);
|
||||
watcher.addListener('change', () => {
|
||||
this._declarationResolver.invalidateCache(moduleId);
|
||||
this._executeSoon();
|
||||
});
|
||||
return resultStream;
|
||||
watcher.addListener('error', (err) => {
|
||||
console.error(`Encountered error while watching ${filePath}.`);
|
||||
console.log(err);
|
||||
delete this._watchedFiles[filePath];
|
||||
for (let i = 0; i < this._watchers.length; i++) {
|
||||
if (this._watchers[i] === watcher) {
|
||||
this._watchers.splice(i, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
watcher.close();
|
||||
this._declarationResolver.invalidateCache(moduleId);
|
||||
this._executeSoon();
|
||||
});
|
||||
this._watchers.push(watcher);
|
||||
};
|
||||
this._fsProvider = new class extends monacodts.FSProvider {
|
||||
readFileSync(moduleId, filePath) {
|
||||
onWillReadFile(moduleId, filePath);
|
||||
return super.readFileSync(moduleId, filePath);
|
||||
}
|
||||
};
|
||||
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
|
||||
if (this._isWatch) {
|
||||
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
|
||||
recipeWatcher.addListener('change', () => {
|
||||
this._executeSoon();
|
||||
});
|
||||
this._watchers.push(recipeWatcher);
|
||||
}
|
||||
}
|
||||
_executeSoon() {
|
||||
if (this._executeSoonTimer !== null) {
|
||||
clearTimeout(this._executeSoonTimer);
|
||||
this._executeSoonTimer = null;
|
||||
}
|
||||
this._executeSoonTimer = setTimeout(() => {
|
||||
this._executeSoonTimer = null;
|
||||
this.execute();
|
||||
}, 20);
|
||||
}
|
||||
dispose() {
|
||||
this._watchers.forEach(watcher => watcher.close());
|
||||
}
|
||||
_run() {
|
||||
let r = monacodts.run3(this._declarationResolver);
|
||||
if (!r && !this._isWatch) {
|
||||
// The build must always be able to generate the monaco.d.ts
|
||||
throw new Error(`monaco.d.ts generation error - Cannot continue`);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
_log(message, ...rest) {
|
||||
fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
|
||||
}
|
||||
execute() {
|
||||
const startTime = Date.now();
|
||||
const result = this._run();
|
||||
if (!result) {
|
||||
// nothing really changed
|
||||
return;
|
||||
}
|
||||
if (result.isTheSame) {
|
||||
return;
|
||||
}
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
|
||||
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
|
||||
if (!this._isWatch) {
|
||||
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5,31 +5,41 @@
|
||||
|
||||
'use strict';
|
||||
|
||||
import * as gulp from 'gulp';
|
||||
import * as tsb from 'gulp-tsb';
|
||||
import * as es from 'event-stream';
|
||||
const watch = require('./watch');
|
||||
import * as nls from './nls';
|
||||
import * as util from './util';
|
||||
import { createReporter } from './reporter';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs';
|
||||
import * as gulp from 'gulp';
|
||||
import * as bom from 'gulp-bom';
|
||||
import * as sourcemaps from 'gulp-sourcemaps';
|
||||
import * as tsb from 'gulp-tsb';
|
||||
import * as path from 'path';
|
||||
import * as _ from 'underscore';
|
||||
import * as monacodts from '../monaco/api';
|
||||
import * as fs from 'fs';
|
||||
import * as nls from './nls';
|
||||
import { createReporter } from './reporter';
|
||||
import * as util from './util';
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
|
||||
const watch = require('./watch');
|
||||
|
||||
const reporter = createReporter();
|
||||
|
||||
function getTypeScriptCompilerOptions(src: string) {
|
||||
const rootDir = path.join(__dirname, `../../${src}`);
|
||||
const options = require(`../../${src}/tsconfig.json`).compilerOptions;
|
||||
const tsconfig = require(`../../${src}/tsconfig.json`);
|
||||
let options: { [key: string]: any };
|
||||
if (tsconfig.extends) {
|
||||
options = Object.assign({}, require(path.join(rootDir, tsconfig.extends)).compilerOptions, tsconfig.compilerOptions);
|
||||
} else {
|
||||
options = tsconfig.compilerOptions;
|
||||
}
|
||||
options.verbose = false;
|
||||
options.sourceMap = true;
|
||||
if (process.env['VSCODE_NO_SOURCEMAP']) { // To be used by developers in a hurry
|
||||
options.sourceMap = false;
|
||||
}
|
||||
options.rootDir = rootDir;
|
||||
options.baseUrl = rootDir;
|
||||
options.sourceRoot = util.toFileUri(rootDir);
|
||||
options.newLine = /\r\n/.test(fs.readFileSync(__filename, 'utf8')) ? 'CRLF' : 'LF';
|
||||
return options;
|
||||
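// A minimal sketch, assuming the same layout as the tsconfig handling above, of how the
// `extends` branch resolves compiler options: the base tsconfig is loaded first and the
// local compilerOptions win on conflict. The helper name and paths are illustrative only.
const path = require('path');
function resolveCompilerOptions(rootDir, tsconfig) {
	if (tsconfig.extends) {
		const base = require(path.join(rootDir, tsconfig.extends)).compilerOptions;
		return Object.assign({}, base, tsconfig.compilerOptions); // local values override the base
	}
	return tsconfig.compilerOptions;
}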
@@ -40,7 +50,7 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
|
||||
opts.inlineSources = !!build;
|
||||
opts.noFilesystemLookup = true;
|
||||
|
||||
const ts = tsb.create(opts, null, null, err => reporter(err.toString()));
|
||||
const ts = tsb.create(opts, true, undefined, err => reporter(err.toString()));
|
||||
|
||||
return function (token?: util.ICancellationToken) {
|
||||
|
||||
@@ -65,12 +75,19 @@ function createCompile(src: string, build: boolean, emitError?: boolean): (token
|
||||
sourceRoot: opts.sourceRoot
|
||||
}))
|
||||
.pipe(tsFilter.restore)
|
||||
.pipe(reporter.end(emitError));
|
||||
.pipe(reporter.end(!!emitError));
|
||||
|
||||
return es.duplex(input, output);
|
||||
};
|
||||
}
|
||||
|
||||
const typesDts = [
|
||||
'node_modules/typescript/lib/*.d.ts',
|
||||
'node_modules/@types/**/*.d.ts',
|
||||
'!node_modules/@types/webpack/**/*',
|
||||
'!node_modules/@types/uglify-js/**/*',
|
||||
];
|
||||
|
||||
export function compileTask(src: string, out: string, build: boolean): () => NodeJS.ReadWriteStream {
|
||||
|
||||
return function () {
|
||||
@@ -78,18 +95,18 @@ export function compileTask(src: string, out: string, build: boolean): () => Nod
|
||||
|
||||
const srcPipe = es.merge(
|
||||
gulp.src(`${src}/**`, { base: `${src}` }),
|
||||
gulp.src('node_modules/typescript/lib/lib.d.ts'),
|
||||
gulp.src(typesDts),
|
||||
);
|
||||
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
|
||||
let generator = new MonacoGenerator(false);
|
||||
if (src === 'src') {
|
||||
generator.execute();
|
||||
}
|
||||
|
||||
return srcPipe
|
||||
.pipe(generator.stream)
|
||||
.pipe(compile())
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(src !== 'src' ? es.through() : monacodtsTask(out, false));
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
}
|
||||
|
||||
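// A rough sketch of the dtsFilter/restore pattern shown in the compileTask pipeline above.
// The build uses its own util.filter helper; gulp-filter is used here only because its API
// has the same shape (an assumption for illustration). Matching files are held back before
// gulp.dest so .d.ts files are never written to disk, then rejoin the stream via `.restore`
// so later pipeline stages (such as the monaco.d.ts step) still see them.
const gulp = require('gulp');
const filter = require('gulp-filter');
const dts = filter(f => !/\.d\.ts$/.test(f.path), { restore: true });
gulp.src('out-src/**')
	.pipe(dts)            // non-.d.ts files pass through, .d.ts files are buffered
	.pipe(gulp.dest('out'))
	.pipe(dts.restore);   // buffered .d.ts files are re-emitted for downstream consumers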
@@ -100,80 +117,128 @@ export function watchTask(out: string, build: boolean): () => NodeJS.ReadWriteSt
|
||||
|
||||
const src = es.merge(
|
||||
gulp.src('src/**', { base: 'src' }),
|
||||
gulp.src('node_modules/typescript/lib/lib.d.ts'),
|
||||
gulp.src(typesDts),
|
||||
);
|
||||
const watchSrc = watch('src/**', { base: 'src' });
|
||||
|
||||
// Do not write .d.ts files to disk, as they are not needed there.
|
||||
const dtsFilter = util.filter(data => !/\.d\.ts$/.test(data.path));
|
||||
let generator = new MonacoGenerator(true);
|
||||
generator.execute();
|
||||
|
||||
return watchSrc
|
||||
.pipe(generator.stream)
|
||||
.pipe(util.incremental(compile, src, true))
|
||||
.pipe(dtsFilter)
|
||||
.pipe(gulp.dest(out))
|
||||
.pipe(dtsFilter.restore)
|
||||
.pipe(monacodtsTask(out, true));
|
||||
.pipe(gulp.dest(out));
|
||||
};
|
||||
}
|
||||
|
||||
function monacodtsTask(out: string, isWatch: boolean): NodeJS.ReadWriteStream {
|
||||
const REPO_SRC_FOLDER = path.join(__dirname, '../../src');
|
||||
|
||||
const basePath = path.resolve(process.cwd(), out);
|
||||
class MonacoGenerator {
|
||||
private readonly _isWatch: boolean;
|
||||
public readonly stream: NodeJS.ReadWriteStream;
|
||||
|
||||
const neededFiles: { [file: string]: boolean; } = {};
|
||||
monacodts.getFilesToWatch(out).forEach(function (filePath) {
|
||||
filePath = path.normalize(filePath);
|
||||
neededFiles[filePath] = true;
|
||||
});
|
||||
private readonly _watchers: fs.FSWatcher[];
|
||||
private readonly _watchedFiles: { [filePath: string]: boolean; };
|
||||
private readonly _fsProvider: monacodts.FSProvider;
|
||||
private readonly _declarationResolver: monacodts.DeclarationResolver;
|
||||
|
||||
const inputFiles: { [file: string]: string; } = {};
|
||||
for (let filePath in neededFiles) {
|
||||
if (/\bsrc(\/|\\)vs\b/.test(filePath)) {
|
||||
// This file is needed from source => simply read it now
|
||||
inputFiles[filePath] = fs.readFileSync(filePath).toString();
|
||||
}
|
||||
}
|
||||
|
||||
const setInputFile = (filePath: string, contents: string) => {
|
||||
if (inputFiles[filePath] === contents) {
|
||||
// no change
|
||||
constructor(isWatch: boolean) {
|
||||
this._isWatch = isWatch;
|
||||
this.stream = es.through();
|
||||
this._watchers = [];
|
||||
this._watchedFiles = {};
|
||||
let onWillReadFile = (moduleId: string, filePath: string) => {
|
||||
if (!this._isWatch) {
|
||||
return;
|
||||
}
|
||||
inputFiles[filePath] = contents;
|
||||
const neededInputFilesCount = Object.keys(neededFiles).length;
|
||||
const availableInputFilesCount = Object.keys(inputFiles).length;
|
||||
if (neededInputFilesCount === availableInputFilesCount) {
|
||||
run();
|
||||
if (this._watchedFiles[filePath]) {
|
||||
return;
|
||||
}
|
||||
};
|
||||
this._watchedFiles[filePath] = true;
|
||||
|
||||
const run = () => {
|
||||
const result = monacodts.run(out, inputFiles);
|
||||
if (!result.isTheSame) {
|
||||
if (isWatch) {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
} else {
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
resultStream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let resultStream: NodeJS.ReadWriteStream;
|
||||
|
||||
if (isWatch) {
|
||||
watch('build/monaco/*').pipe(es.through(function () {
|
||||
run();
|
||||
}));
|
||||
}
|
||||
|
||||
resultStream = es.through(function (data) {
|
||||
const filePath = path.normalize(path.resolve(basePath, data.relative));
|
||||
if (neededFiles[filePath]) {
|
||||
setInputFile(filePath, data.contents.toString());
|
||||
}
|
||||
this.emit('data', data);
|
||||
const watcher = fs.watch(filePath);
|
||||
watcher.addListener('change', () => {
|
||||
this._declarationResolver.invalidateCache(moduleId);
|
||||
this._executeSoon();
|
||||
});
|
||||
|
||||
return resultStream;
|
||||
watcher.addListener('error', (err) => {
|
||||
console.error(`Encountered error while watching ${filePath}.`);
|
||||
console.log(err);
|
||||
delete this._watchedFiles[filePath];
|
||||
for (let i = 0; i < this._watchers.length; i++) {
|
||||
if (this._watchers[i] === watcher) {
|
||||
this._watchers.splice(i, 1);
|
||||
break;
|
||||
}
|
||||
}
|
||||
watcher.close();
|
||||
this._declarationResolver.invalidateCache(moduleId);
|
||||
this._executeSoon();
|
||||
});
|
||||
this._watchers.push(watcher);
|
||||
};
|
||||
this._fsProvider = new class extends monacodts.FSProvider {
|
||||
public readFileSync(moduleId: string, filePath: string): Buffer {
|
||||
onWillReadFile(moduleId, filePath);
|
||||
return super.readFileSync(moduleId, filePath);
|
||||
}
|
||||
};
|
||||
this._declarationResolver = new monacodts.DeclarationResolver(this._fsProvider);
|
||||
|
||||
if (this._isWatch) {
|
||||
const recipeWatcher = fs.watch(monacodts.RECIPE_PATH);
|
||||
recipeWatcher.addListener('change', () => {
|
||||
this._executeSoon();
|
||||
});
|
||||
this._watchers.push(recipeWatcher);
|
||||
}
|
||||
}
|
||||
|
||||
private _executeSoonTimer: NodeJS.Timer | null = null;
|
||||
private _executeSoon(): void {
|
||||
if (this._executeSoonTimer !== null) {
|
||||
clearTimeout(this._executeSoonTimer);
|
||||
this._executeSoonTimer = null;
|
||||
}
|
||||
this._executeSoonTimer = setTimeout(() => {
|
||||
this._executeSoonTimer = null;
|
||||
this.execute();
|
||||
}, 20);
|
||||
}
|
||||
|
||||
public dispose(): void {
|
||||
this._watchers.forEach(watcher => watcher.close());
|
||||
}
|
||||
|
||||
private _run(): monacodts.IMonacoDeclarationResult | null {
|
||||
let r = monacodts.run3(this._declarationResolver);
|
||||
if (!r && !this._isWatch) {
|
||||
// The build must always be able to generate the monaco.d.ts
|
||||
throw new Error(`monaco.d.ts generation error - Cannot continue`);
|
||||
}
|
||||
return r;
|
||||
}
|
||||
|
||||
private _log(message: any, ...rest: any[]): void {
|
||||
fancyLog(ansiColors.cyan('[monaco.d.ts]'), message, ...rest);
|
||||
}
|
||||
|
||||
public execute(): void {
|
||||
const startTime = Date.now();
|
||||
const result = this._run();
|
||||
if (!result) {
|
||||
// nothing really changed
|
||||
return;
|
||||
}
|
||||
if (result.isTheSame) {
|
||||
return;
|
||||
}
|
||||
|
||||
fs.writeFileSync(result.filePath, result.content);
|
||||
fs.writeFileSync(path.join(REPO_SRC_FOLDER, 'vs/editor/common/standalone/standaloneEnums.ts'), result.enums);
|
||||
this._log(`monaco.d.ts is changed - total time took ${Date.now() - startTime} ms`);
|
||||
if (!this._isWatch) {
|
||||
this.stream.emit('error', 'monaco.d.ts is no longer up to date. Please run gulp watch and commit the new file.');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
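// A minimal standalone sketch of the debounce used by MonacoGenerator._executeSoon above:
// bursts of file-change events inside the 20 ms window collapse into a single execute() call.
// The variable and callback names are illustrative.
let executeSoonTimer = null;
function executeSoon(execute) {
	if (executeSoonTimer !== null) {
		clearTimeout(executeSoonTimer);
	}
	executeSoonTimer = setTimeout(() => {
		executeSoonTimer = null;
		execute();
	}, 20);
}
// e.g. a watcher callback can call executeSoon(() => generator.execute()) on every change event.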
@@ -11,6 +11,7 @@ const root = path.dirname(path.dirname(__dirname));

function getElectronVersion() {
	const yarnrc = fs.readFileSync(path.join(root, '.yarnrc'), 'utf8');
	// @ts-ignore
	const target = /^target "(.*)"$/m.exec(yarnrc)[1];

	return target;
@@ -19,6 +20,7 @@ function getElectronVersion() {
module.exports.getElectronVersion = getElectronVersion;

// returns 0 if the right version of electron is in .build/electron
// @ts-ignore
if (require.main === module) {
	const version = getElectronVersion();
	const versionFile = path.join(root, '.build', 'electron', 'version');

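// A worked example (with made-up .yarnrc contents) of the parsing in getElectronVersion above.
const sampleYarnrc = 'disturl "https://atom.io/download/electron"\ntarget "4.2.10"\n';
const sampleTarget = /^target "(.*)"$/m.exec(sampleYarnrc)[1]; // -> '4.2.10'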
@@ -4,116 +4,330 @@
|
||||
* Licensed under the Source EULA. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var es = require("event-stream");
|
||||
var assign = require("object-assign");
|
||||
var remote = require("gulp-remote-src");
|
||||
var flatmap = require('gulp-flatmap');
|
||||
var vzip = require('gulp-vinyl-zip');
|
||||
var filter = require('gulp-filter');
|
||||
var rename = require('gulp-rename');
|
||||
var util = require('gulp-util');
|
||||
var buffer = require('gulp-buffer');
|
||||
var json = require('gulp-json-editor');
|
||||
var fs = require("fs");
|
||||
var path = require("path");
|
||||
var vsce = require("vsce");
|
||||
var File = require("vinyl");
|
||||
function fromLocal(extensionPath) {
|
||||
var result = es.through();
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||
.then(function (fileNames) {
|
||||
var files = fileNames
|
||||
.map(function (fileName) { return path.join(extensionPath, fileName); })
|
||||
.map(function (filePath) { return new File({
|
||||
const es = require("event-stream");
|
||||
const fs = require("fs");
|
||||
const glob = require("glob");
|
||||
const gulp = require("gulp");
|
||||
const path = require("path");
|
||||
const File = require("vinyl");
|
||||
const vsce = require("vsce");
|
||||
const stats_1 = require("./stats");
|
||||
const util2 = require("./util");
|
||||
const remote = require("gulp-remote-src");
|
||||
const vzip = require('gulp-vinyl-zip');
|
||||
const filter = require("gulp-filter");
|
||||
const rename = require("gulp-rename");
|
||||
const fancyLog = require("fancy-log");
|
||||
const ansiColors = require("ansi-colors");
|
||||
const buffer = require('gulp-buffer');
|
||||
const json = require("gulp-json-editor");
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
const root = path.resolve(path.join(__dirname, '..', '..'));
|
||||
// {{SQL CARBON EDIT}}
|
||||
const _ = require("underscore");
|
||||
const vfs = require("vinyl-fs");
|
||||
const deps = require('../dependencies');
|
||||
const extensionsRoot = path.join(root, 'extensions');
|
||||
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
|
||||
function packageBuiltInExtensions() {
|
||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
||||
const visxDirectory = path.join(path.dirname(root), 'vsix');
|
||||
try {
|
||||
if (!fs.existsSync(visxDirectory)) {
|
||||
fs.mkdirSync(visxDirectory);
|
||||
}
|
||||
}
|
||||
catch (err) {
|
||||
// don't fail the build if the output directory already exists
|
||||
console.warn(err);
|
||||
}
|
||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
||||
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
|
||||
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||
vsce.createVSIX({
|
||||
cwd: element.path,
|
||||
packagePath: packagePath,
|
||||
useYarn: true
|
||||
});
|
||||
});
|
||||
}
|
||||
exports.packageBuiltInExtensions = packageBuiltInExtensions;
|
||||
function packageExtensionTask(extensionName, platform, arch) {
|
||||
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
||||
if (platform === 'darwin') {
|
||||
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
|
||||
}
|
||||
else {
|
||||
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
|
||||
}
|
||||
platform = platform || process.platform;
|
||||
return () => {
|
||||
const root = path.resolve(path.join(__dirname, '../..'));
|
||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => extensionName === name);
|
||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||
return fromLocal(extension.path);
|
||||
}));
|
||||
let result = localExtensions
|
||||
.pipe(util2.skipDirectories())
|
||||
.pipe(util2.fixWin32DirectoryPermissions())
|
||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
||||
return result.pipe(vfs.dest(destination));
|
||||
};
|
||||
}
|
||||
exports.packageExtensionTask = packageExtensionTask;
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
function fromLocal(extensionPath, sourceMappingURLBase) {
|
||||
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
|
||||
if (fs.existsSync(webpackFilename)) {
|
||||
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
|
||||
}
|
||||
else {
|
||||
return fromLocalNormal(extensionPath);
|
||||
}
|
||||
}
|
||||
function fromLocalWebpack(extensionPath, sourceMappingURLBase) {
|
||||
const result = es.through();
|
||||
const packagedDependencies = [];
|
||||
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
|
||||
if (packageJsonConfig.dependencies) {
|
||||
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
|
||||
for (const key in webpackRootConfig.externals) {
|
||||
if (key in packageJsonConfig.dependencies) {
|
||||
packagedDependencies.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
.map(filePath => new File({
|
||||
path: filePath,
|
||||
stat: fs.statSync(filePath),
|
||||
base: extensionPath,
|
||||
contents: fs.createReadStream(filePath)
|
||||
}); });
|
||||
}));
|
||||
const filesStream = es.readArray(files);
|
||||
// check for webpack configuration files, then invoke webpack
|
||||
// and merge its output with the files stream. also rewrite the package.json
|
||||
// file to a new entry point
|
||||
const webpackConfigLocations = glob.sync(path.join(extensionPath, '/**/extension.webpack.config.js'), { ignore: ['**/node_modules'] });
|
||||
const packageJsonFilter = filter(f => {
|
||||
if (path.basename(f.path) === 'package.json') {
|
||||
// only modify package.json's next to the webpack file.
|
||||
// to be safe, use existsSync instead of path comparison.
|
||||
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
|
||||
}
|
||||
return false;
|
||||
}, { restore: true });
|
||||
const patchFilesStream = filesStream
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(json((data) => {
|
||||
if (data.main) {
|
||||
// hardcoded entry point directory!
|
||||
data.main = data.main.replace('/out/', '/dist/');
|
||||
}
|
||||
return data;
|
||||
}))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
|
||||
const webpackDone = (err, stats) => {
|
||||
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
|
||||
if (err) {
|
||||
result.emit('error', err);
|
||||
}
|
||||
const { compilation } = stats;
|
||||
if (compilation.errors.length > 0) {
|
||||
result.emit('error', compilation.errors.join('\n'));
|
||||
}
|
||||
if (compilation.warnings.length > 0) {
|
||||
result.emit('error', compilation.warnings.join('\n'));
|
||||
}
|
||||
};
|
||||
const webpackConfig = Object.assign({}, require(webpackConfigPath), { mode: 'production' });
|
||||
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
||||
return webpackGulp(webpackConfig, webpack, webpackDone)
|
||||
.pipe(es.through(function (data) {
|
||||
data.stat = data.stat || {};
|
||||
data.base = extensionPath;
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(es.through(function (data) {
|
||||
// source map handling:
|
||||
// * rewrite sourceMappingURL
|
||||
// * save to disk so that upload-task picks this up
|
||||
if (sourceMappingURLBase) {
|
||||
const contents = data.contents.toString('utf8');
|
||||
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
|
||||
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
|
||||
}), 'utf8');
|
||||
if (/\.js\.map$/.test(data.path)) {
|
||||
if (!fs.existsSync(path.dirname(data.path))) {
|
||||
fs.mkdirSync(path.dirname(data.path));
|
||||
}
|
||||
fs.writeFileSync(data.path, data.contents);
|
||||
}
|
||||
}
|
||||
this.emit('data', data);
|
||||
}));
|
||||
});
|
||||
es.merge(sequence(webpackStreams), patchFilesStream)
|
||||
// .pipe(es.through(function (data) {
|
||||
// // debug
|
||||
// console.log('out', data.path, data.contents.length);
|
||||
// this.emit('data', data);
|
||||
// }))
|
||||
.pipe(result);
|
||||
}).catch(err => {
|
||||
console.error(extensionPath);
|
||||
console.error(packagedDependencies);
|
||||
result.emit('error', err);
|
||||
});
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
}
|
||||
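// A worked example (base URL, extension name and paths are placeholders) of the
// sourceMappingURL rewrite performed above for webpacked extensions.
const sampleBase = 'https://example.com/sourcemaps';
const rewritten = '\n//# sourceMappingURL=extension.js.map'.replace(
	/\n\/\/# sourceMappingURL=(.*)$/gm,
	(_m, g1) => `\n//# sourceMappingURL=${sampleBase}/extensions/someextension/dist/${g1}`
);
// rewritten === '\n//# sourceMappingURL=https://example.com/sourcemaps/extensions/someextension/dist/extension.js.map'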
function fromLocalNormal(extensionPath) {
|
||||
const result = es.through();
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||
.then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
.map(filePath => new File({
|
||||
path: filePath,
|
||||
stat: fs.statSync(filePath),
|
||||
base: extensionPath,
|
||||
contents: fs.createReadStream(filePath)
|
||||
}));
|
||||
es.readArray(files).pipe(result);
|
||||
})
|
||||
.catch(function (err) { return result.emit('error', err); });
|
||||
return result;
|
||||
.catch(err => result.emit('error', err));
|
||||
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
|
||||
}
|
||||
exports.fromLocal = fromLocal;
|
||||
function error(err) {
|
||||
var result = es.through();
|
||||
setTimeout(function () { return result.emit('error', err); });
|
||||
return result;
|
||||
}
|
||||
var baseHeaders = {
|
||||
const baseHeaders = {
|
||||
'X-Market-Client-Id': 'VSCode Build',
|
||||
'User-Agent': 'VSCode Build',
|
||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||
};
|
||||
function fromMarketplace(extensionName, version) {
|
||||
var filterType = 7;
|
||||
var value = extensionName;
|
||||
var criterium = { filterType: filterType, value: value };
|
||||
var criteria = [criterium];
|
||||
var pageNumber = 1;
|
||||
var pageSize = 1;
|
||||
var sortBy = 0;
|
||||
var sortOrder = 0;
|
||||
var flags = 0x1 | 0x2 | 0x80;
|
||||
var assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
|
||||
var filters = [{ criteria: criteria, pageNumber: pageNumber, pageSize: pageSize, sortBy: sortBy, sortOrder: sortOrder }];
|
||||
var body = JSON.stringify({ filters: filters, assetTypes: assetTypes, flags: flags });
|
||||
var headers = assign({}, baseHeaders, {
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json;api-version=3.0-preview.1',
|
||||
'Content-Length': body.length
|
||||
});
|
||||
var options = {
|
||||
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
|
||||
requestOptions: {
|
||||
method: 'POST',
|
||||
gzip: true,
|
||||
headers: headers,
|
||||
body: body
|
||||
}
|
||||
};
|
||||
return remote('/extensionquery', options)
|
||||
.pipe(flatmap(function (stream, f) {
|
||||
var rawResult = f.contents.toString('utf8');
|
||||
var result = JSON.parse(rawResult);
|
||||
var extension = result.results[0].extensions[0];
|
||||
if (!extension) {
|
||||
return error("No such extension: " + extension);
|
||||
}
|
||||
var metadata = {
|
||||
id: extension.extensionId,
|
||||
publisherId: extension.publisher,
|
||||
publisherDisplayName: extension.publisher.displayName
|
||||
};
|
||||
var extensionVersion = extension.versions.filter(function (v) { return v.version === version; })[0];
|
||||
if (!extensionVersion) {
|
||||
return error("No such extension version: " + extensionName + " @ " + version);
|
||||
}
|
||||
var asset = extensionVersion.files.filter(function (f) { return f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage'; })[0];
|
||||
if (!asset) {
|
||||
return error("No VSIX found for extension version: " + extensionName + " @ " + version);
|
||||
}
|
||||
util.log('Downloading extension:', util.colors.yellow(extensionName + "@" + version), '...');
|
||||
var options = {
|
||||
base: asset.source,
|
||||
function fromMarketplace(extensionName, version, metadata) {
|
||||
const [publisher, name] = extensionName.split('.');
|
||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
||||
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
|
||||
const options = {
|
||||
base: url,
|
||||
requestOptions: {
|
||||
gzip: true,
|
||||
headers: baseHeaders
|
||||
}
|
||||
};
|
||||
const packageJsonFilter = filter('package.json', { restore: true });
|
||||
return remote('', options)
|
||||
.pipe(flatmap(function (stream) {
|
||||
var packageJsonFilter = filter('package.json', { restore: true });
|
||||
return stream
|
||||
.pipe(vzip.src())
|
||||
.pipe(filter('extension/**'))
|
||||
.pipe(rename(function (p) { return p.dirname = p.dirname.replace(/^extension\/?/, ''); }))
|
||||
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(json({ __metadata: metadata }))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
}));
|
||||
}));
|
||||
}
|
||||
exports.fromMarketplace = fromMarketplace;
|
||||
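// A usage sketch for fromMarketplace above; the extension id, version and metadata are
// placeholders. The "publisher.name" id is split and interpolated into the gallery
// download URL, and the returned stream carries the unzipped VSIX contents.
const [samplePublisher, sampleName] = 'somepublisher.someextension'.split('.');
const sampleVersion = '1.0.0';
const sampleDownloadUrl = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${samplePublisher}/vsextensions/${sampleName}/${sampleVersion}/vspackage`;
// fromMarketplace('somepublisher.someextension', sampleVersion, { id: 'placeholder-id' })
//     .pipe(gulp.dest('.build/builtInExtensions/someextension'));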
const excludedExtensions = [
|
||||
'vscode-api-tests',
|
||||
'vscode-colorize-tests',
|
||||
'vscode-test-resolver',
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'integration-tests'
|
||||
];
|
||||
// {{SQL CARBON EDIT}}
|
||||
const sqlBuiltInExtensions = [
|
||||
// Add SQL built-in extensions here.
|
||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
||||
'admin-tool-ext-win',
|
||||
'agent',
|
||||
'import',
|
||||
'profiler',
|
||||
'admin-pack',
|
||||
'big-data-cluster',
|
||||
'dacpac',
|
||||
'schema-compare',
|
||||
'resource-deployment',
|
||||
'cms'
|
||||
];
|
||||
const builtInExtensions = require('../builtInExtensions.json');
|
||||
/**
|
||||
* We're doing way too much stuff at once, with webpack et al. So much stuff
|
||||
* that while downloading extensions from the marketplace, node js doesn't get enough
|
||||
* stack frames to complete the download in under 2 minutes, at which point the
|
||||
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
|
||||
*/
|
||||
function sequence(streamProviders) {
|
||||
const result = es.through();
|
||||
function pop() {
|
||||
if (streamProviders.length === 0) {
|
||||
result.emit('end');
|
||||
}
|
||||
else {
|
||||
const fn = streamProviders.shift();
|
||||
fn()
|
||||
.on('end', function () { setTimeout(pop, 0); })
|
||||
.pipe(result, { end: false });
|
||||
}
|
||||
}
|
||||
pop();
|
||||
return result;
|
||||
}
|
||||
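// A usage sketch for sequence() above (the stream providers are placeholders; gulp and
// sequence are the ones already defined in this file): each provider is invoked only after
// the previous stream emits 'end', so work runs strictly one at a time while all output is
// funnelled into the single returned stream.
const firstProvider = () => gulp.src('extensions/first-extension/**');
const secondProvider = () => gulp.src('extensions/second-extension/**');
const combined = sequence([firstProvider, secondProvider]); // secondProvider starts only once the first stream ends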
function packageExtensionsStream(optsIn) {
|
||||
const opts = optsIn || {};
|
||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
// {{SQL CARBON EDIT}}
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
|
||||
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
|
||||
return fromLocal(extension.path, opts.sourceMappingURLBase)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
})]);
|
||||
// {{SQL CARBON EDIT}}
|
||||
const extensionDepsSrc = [
|
||||
..._.flatten(extensionsProductionDependencies.map((d) => path.relative(root, d.path)).map((d) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
];
|
||||
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']));
|
||||
// Original code commented out here
|
||||
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
|
||||
// const marketplaceExtensions = () => es.merge(
|
||||
// ...builtInExtensions
|
||||
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
||||
// .map(extension => {
|
||||
// return fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
// })
|
||||
// );
|
||||
return sequence([localExtensions, localExtensionDependencies,])
|
||||
.pipe(util2.setExecutableBit(['**/*.sh']))
|
||||
.pipe(filter(['**', '!**/*.js.map']));
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
}
|
||||
exports.packageExtensionsStream = packageExtensionsStream;
|
||||
|
||||
@@ -4,22 +4,232 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as es from 'event-stream';
|
||||
import { Stream } from 'stream';
|
||||
import assign = require('object-assign');
|
||||
import remote = require('gulp-remote-src');
|
||||
const flatmap = require('gulp-flatmap');
|
||||
const vzip = require('gulp-vinyl-zip');
|
||||
const filter = require('gulp-filter');
|
||||
const rename = require('gulp-rename');
|
||||
const util = require('gulp-util');
|
||||
const buffer = require('gulp-buffer');
|
||||
const json = require('gulp-json-editor');
|
||||
import * as fs from 'fs';
|
||||
import * as glob from 'glob';
|
||||
import * as gulp from 'gulp';
|
||||
import * as path from 'path';
|
||||
import * as vsce from 'vsce';
|
||||
import { Stream } from 'stream';
|
||||
import * as File from 'vinyl';
|
||||
import * as vsce from 'vsce';
|
||||
import { createStatsStream } from './stats';
|
||||
import * as util2 from './util';
|
||||
import remote = require('gulp-remote-src');
|
||||
const vzip = require('gulp-vinyl-zip');
|
||||
import filter = require('gulp-filter');
|
||||
import rename = require('gulp-rename');
|
||||
import * as fancyLog from 'fancy-log';
|
||||
import * as ansiColors from 'ansi-colors';
|
||||
const buffer = require('gulp-buffer');
|
||||
import json = require('gulp-json-editor');
|
||||
const webpack = require('webpack');
|
||||
const webpackGulp = require('webpack-stream');
|
||||
|
||||
export function fromLocal(extensionPath: string): Stream {
|
||||
const root = path.resolve(path.join(__dirname, '..', '..'));
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
import * as _ from 'underscore';
|
||||
import * as vfs from 'vinyl-fs';
|
||||
const deps = require('../dependencies');
|
||||
const extensionsRoot = path.join(root, 'extensions');
|
||||
const extensionsProductionDependencies = deps.getProductionDependencies(extensionsRoot);
|
||||
|
||||
export function packageBuiltInExtensions() {
|
||||
const sqlBuiltInLocalExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) >= 0);
|
||||
const visxDirectory = path.join(path.dirname(root), 'vsix');
|
||||
try {
|
||||
if (!fs.existsSync(visxDirectory)) {
|
||||
fs.mkdirSync(visxDirectory);
|
||||
}
|
||||
} catch (err) {
|
||||
// don't fail the build if the output directory already exists
|
||||
console.warn(err);
|
||||
}
|
||||
sqlBuiltInLocalExtensionDescriptions.forEach(element => {
|
||||
let pkgJson = JSON.parse(fs.readFileSync(path.join(element.path, 'package.json'), { encoding: 'utf8' }));
|
||||
const packagePath = path.join(visxDirectory, `${pkgJson.name}-${pkgJson.version}.vsix`);
|
||||
console.info('Creating vsix for ' + element.path + ' result:' + packagePath);
|
||||
vsce.createVSIX({
|
||||
cwd: element.path,
|
||||
packagePath: packagePath,
|
||||
useYarn: true
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export function packageExtensionTask(extensionName: string, platform: string, arch: string) {
|
||||
var destination = path.join(path.dirname(root), 'azuredatastudio') + (platform ? '-' + platform : '') + (arch ? '-' + arch : '');
|
||||
if (platform === 'darwin') {
|
||||
destination = path.join(destination, 'Azure Data Studio.app', 'Contents', 'Resources', 'app', 'extensions', extensionName);
|
||||
} else {
|
||||
destination = path.join(destination, 'resources', 'app', 'extensions', extensionName);
|
||||
}
|
||||
|
||||
platform = platform || process.platform;
|
||||
|
||||
return () => {
|
||||
const root = path.resolve(path.join(__dirname, '../..'));
|
||||
const localExtensionDescriptions = glob.sync('extensions/*/package.json')
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => extensionName === name);
|
||||
|
||||
const localExtensions = es.merge(...localExtensionDescriptions.map(extension => {
|
||||
return fromLocal(extension.path);
|
||||
}));
|
||||
|
||||
let result = localExtensions
|
||||
.pipe(util2.skipDirectories())
|
||||
.pipe(util2.fixWin32DirectoryPermissions())
|
||||
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version']));
|
||||
|
||||
return result.pipe(vfs.dest(destination));
|
||||
};
|
||||
}
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
|
||||
function fromLocal(extensionPath: string, sourceMappingURLBase?: string): Stream {
|
||||
const webpackFilename = path.join(extensionPath, 'extension.webpack.config.js');
|
||||
if (fs.existsSync(webpackFilename)) {
|
||||
return fromLocalWebpack(extensionPath, sourceMappingURLBase);
|
||||
} else {
|
||||
return fromLocalNormal(extensionPath);
|
||||
}
|
||||
}
|
||||
|
||||
function fromLocalWebpack(extensionPath: string, sourceMappingURLBase: string | undefined): Stream {
|
||||
const result = es.through();
|
||||
|
||||
const packagedDependencies: string[] = [];
|
||||
const packageJsonConfig = require(path.join(extensionPath, 'package.json'));
|
||||
if (packageJsonConfig.dependencies) {
|
||||
const webpackRootConfig = require(path.join(extensionPath, 'extension.webpack.config.js'));
|
||||
for (const key in webpackRootConfig.externals) {
|
||||
if (key in packageJsonConfig.dependencies) {
|
||||
packagedDependencies.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn, packagedDependencies }).then(fileNames => {
|
||||
const files = fileNames
|
||||
.map(fileName => path.join(extensionPath, fileName))
|
||||
.map(filePath => new File({
|
||||
path: filePath,
|
||||
stat: fs.statSync(filePath),
|
||||
base: extensionPath,
|
||||
contents: fs.createReadStream(filePath) as any
|
||||
}));
|
||||
|
||||
const filesStream = es.readArray(files);
|
||||
|
||||
// check for webpack configuration files, then invoke webpack
|
||||
// and merge its output with the files stream. also rewrite the package.json
|
||||
// file to a new entry point
|
||||
const webpackConfigLocations = (<string[]>glob.sync(
|
||||
path.join(extensionPath, '/**/extension.webpack.config.js'),
|
||||
{ ignore: ['**/node_modules'] }
|
||||
));
|
||||
|
||||
const packageJsonFilter = filter(f => {
|
||||
if (path.basename(f.path) === 'package.json') {
|
||||
// only modify package.json's next to the webpack file.
|
||||
// to be safe, use existsSync instead of path comparison.
|
||||
return fs.existsSync(path.join(path.dirname(f.path), 'extension.webpack.config.js'));
|
||||
}
|
||||
return false;
|
||||
}, { restore: true });
|
||||
|
||||
const patchFilesStream = filesStream
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(json((data: any) => {
|
||||
if (data.main) {
|
||||
// hardcoded entry point directory!
|
||||
data.main = data.main.replace('/out/', '/dist/');
|
||||
}
|
||||
return data;
|
||||
}))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
|
||||
|
||||
const webpackStreams = webpackConfigLocations.map(webpackConfigPath => () => {
|
||||
|
||||
const webpackDone = (err: any, stats: any) => {
|
||||
fancyLog(`Bundled extension: ${ansiColors.yellow(path.join(path.basename(extensionPath), path.relative(extensionPath, webpackConfigPath)))}...`);
|
||||
if (err) {
|
||||
result.emit('error', err);
|
||||
}
|
||||
const { compilation } = stats;
|
||||
if (compilation.errors.length > 0) {
|
||||
result.emit('error', compilation.errors.join('\n'));
|
||||
}
|
||||
if (compilation.warnings.length > 0) {
|
||||
result.emit('error', compilation.warnings.join('\n'));
|
||||
}
|
||||
};
|
||||
|
||||
const webpackConfig = {
|
||||
...require(webpackConfigPath),
|
||||
...{ mode: 'production' }
|
||||
};
|
||||
const relativeOutputPath = path.relative(extensionPath, webpackConfig.output.path);
|
||||
|
||||
return webpackGulp(webpackConfig, webpack, webpackDone)
|
||||
.pipe(es.through(function (data) {
|
||||
data.stat = data.stat || {};
|
||||
data.base = extensionPath;
|
||||
this.emit('data', data);
|
||||
}))
|
||||
.pipe(es.through(function (data: File) {
|
||||
// source map handling:
|
||||
// * rewrite sourceMappingURL
|
||||
// * save to disk so that upload-task picks this up
|
||||
if (sourceMappingURLBase) {
|
||||
const contents = (<Buffer>data.contents).toString('utf8');
|
||||
data.contents = Buffer.from(contents.replace(/\n\/\/# sourceMappingURL=(.*)$/gm, function (_m, g1) {
|
||||
return `\n//# sourceMappingURL=${sourceMappingURLBase}/extensions/${path.basename(extensionPath)}/${relativeOutputPath}/${g1}`;
|
||||
}), 'utf8');
|
||||
|
||||
if (/\.js\.map$/.test(data.path)) {
|
||||
if (!fs.existsSync(path.dirname(data.path))) {
|
||||
fs.mkdirSync(path.dirname(data.path));
|
||||
}
|
||||
fs.writeFileSync(data.path, data.contents);
|
||||
}
|
||||
}
|
||||
this.emit('data', data);
|
||||
}));
|
||||
});
|
||||
|
||||
es.merge(sequence(webpackStreams), patchFilesStream)
|
||||
// .pipe(es.through(function (data) {
|
||||
// // debug
|
||||
// console.log('out', data.path, data.contents.length);
|
||||
// this.emit('data', data);
|
||||
// }))
|
||||
.pipe(result);
|
||||
|
||||
}).catch(err => {
|
||||
console.error(extensionPath);
|
||||
console.error(packagedDependencies);
|
||||
result.emit('error', err);
|
||||
});
|
||||
|
||||
return result.pipe(createStatsStream(path.basename(extensionPath)));
|
||||
}
|
||||
|
||||
function fromLocalNormal(extensionPath: string): Stream {
|
||||
const result = es.through();
|
||||
|
||||
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
|
||||
@@ -37,13 +247,7 @@ export function fromLocal(extensionPath: string): Stream {
|
||||
})
|
||||
.catch(err => result.emit('error', err));
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
function error(err: any): Stream {
|
||||
const result = es.through();
|
||||
setTimeout(() => result.emit('error', err));
|
||||
return result;
|
||||
return result.pipe(createStatsStream(path.basename(extensionPath)));
|
||||
}
|
||||
|
||||
const baseHeaders = {
|
||||
@@ -52,82 +256,142 @@ const baseHeaders = {
|
||||
'X-Market-User-Id': '291C1CD0-051A-4123-9B4B-30D60EF52EE2',
|
||||
};
|
||||
|
||||
export function fromMarketplace(extensionName: string, version: string): Stream {
|
||||
const filterType = 7;
|
||||
const value = extensionName;
|
||||
const criterium = { filterType, value };
|
||||
const criteria = [criterium];
|
||||
const pageNumber = 1;
|
||||
const pageSize = 1;
|
||||
const sortBy = 0;
|
||||
const sortOrder = 0;
|
||||
const flags = 0x1 | 0x2 | 0x80;
|
||||
const assetTypes = ['Microsoft.VisualStudio.Services.VSIXPackage'];
|
||||
const filters = [{ criteria, pageNumber, pageSize, sortBy, sortOrder }];
|
||||
const body = JSON.stringify({ filters, assetTypes, flags });
|
||||
const headers: any = assign({}, baseHeaders, {
|
||||
'Content-Type': 'application/json',
|
||||
'Accept': 'application/json;api-version=3.0-preview.1',
|
||||
'Content-Length': body.length
|
||||
});
|
||||
export function fromMarketplace(extensionName: string, version: string, metadata: any): Stream {
|
||||
const [publisher, name] = extensionName.split('.');
|
||||
const url = `https://marketplace.visualstudio.com/_apis/public/gallery/publishers/${publisher}/vsextensions/${name}/${version}/vspackage`;
|
||||
|
||||
fancyLog('Downloading extension:', ansiColors.yellow(`${extensionName}@${version}`), '...');
|
||||
|
||||
const options = {
|
||||
base: 'https://marketplace.visualstudio.com/_apis/public/gallery',
|
||||
requestOptions: {
|
||||
method: 'POST',
|
||||
gzip: true,
|
||||
headers,
|
||||
body: body
|
||||
}
|
||||
};
|
||||
|
||||
return remote('/extensionquery', options)
|
||||
.pipe(flatmap((stream, f) => {
|
||||
const rawResult = f.contents.toString('utf8');
|
||||
const result = JSON.parse(rawResult);
|
||||
const extension = result.results[0].extensions[0];
|
||||
if (!extension) {
|
||||
return error(`No such extension: ${extension}`);
|
||||
}
|
||||
|
||||
const metadata = {
|
||||
id: extension.extensionId,
|
||||
publisherId: extension.publisher,
|
||||
publisherDisplayName: extension.publisher.displayName
|
||||
};
|
||||
|
||||
const extensionVersion = extension.versions.filter(v => v.version === version)[0];
|
||||
if (!extensionVersion) {
|
||||
return error(`No such extension version: ${extensionName} @ ${version}`);
|
||||
}
|
||||
|
||||
const asset = extensionVersion.files.filter(f => f.assetType === 'Microsoft.VisualStudio.Services.VSIXPackage')[0];
|
||||
if (!asset) {
|
||||
return error(`No VSIX found for extension version: ${extensionName} @ ${version}`);
|
||||
}
|
||||
|
||||
util.log('Downloading extension:', util.colors.yellow(`${extensionName}@${version}`), '...');
|
||||
|
||||
const options = {
|
||||
base: asset.source,
|
||||
base: url,
|
||||
requestOptions: {
|
||||
gzip: true,
|
||||
headers: baseHeaders
|
||||
}
|
||||
};
|
||||
|
||||
return remote('', options)
|
||||
.pipe(flatmap(stream => {
|
||||
const packageJsonFilter = filter('package.json', { restore: true });
|
||||
|
||||
return stream
|
||||
return remote('', options)
|
||||
.pipe(vzip.src())
|
||||
.pipe(filter('extension/**'))
|
||||
.pipe(rename(p => p.dirname = p.dirname.replace(/^extension\/?/, '')))
|
||||
.pipe(rename(p => p.dirname = p.dirname!.replace(/^extension\/?/, '')))
|
||||
.pipe(packageJsonFilter)
|
||||
.pipe(buffer())
|
||||
.pipe(json({ __metadata: metadata }))
|
||||
.pipe(packageJsonFilter.restore);
|
||||
}));
|
||||
}));
|
||||
}
|
||||
|
||||
interface IPackageExtensionsOptions {
|
||||
/**
|
||||
* Set to undefined to package all of them.
|
||||
*/
|
||||
desiredExtensions?: string[];
|
||||
sourceMappingURLBase?: string;
|
||||
}
|
||||
|
||||
const excludedExtensions = [
|
||||
'vscode-api-tests',
|
||||
'vscode-colorize-tests',
|
||||
'vscode-test-resolver',
|
||||
'ms-vscode.node-debug',
|
||||
'ms-vscode.node-debug2',
|
||||
// {{SQL CARBON EDIT}}
|
||||
'integration-tests'
|
||||
];
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const sqlBuiltInExtensions = [
|
||||
// Add SQL built-in extensions here.
|
||||
// the extension will be excluded from SQLOps package and will have separate vsix packages
|
||||
'admin-tool-ext-win',
|
||||
'agent',
|
||||
'import',
|
||||
'profiler',
|
||||
'admin-pack',
|
||||
'big-data-cluster',
|
||||
'dacpac',
|
||||
'schema-compare',
|
||||
'resource-deployment',
|
||||
'cms'
|
||||
];
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
|
||||
interface IBuiltInExtension {
|
||||
name: string;
|
||||
version: string;
|
||||
repo: string;
|
||||
metadata: any;
|
||||
}
|
||||
|
||||
const builtInExtensions: IBuiltInExtension[] = require('../builtInExtensions.json');
|
||||
|
||||
/**
|
||||
* We're doing way too much stuff at once, with webpack et al. So much stuff
|
||||
* that while downloading extensions from the marketplace, node js doesn't get enough
|
||||
* stack frames to complete the download in under 2 minutes, at which point the
|
||||
* marketplace server cuts off the http request. So, we sequentialize the extension tasks.
|
||||
*/
|
||||
function sequence(streamProviders: { (): Stream }[]): Stream {
|
||||
const result = es.through();
|
||||
|
||||
function pop() {
|
||||
if (streamProviders.length === 0) {
|
||||
result.emit('end');
|
||||
} else {
|
||||
const fn = streamProviders.shift()!;
|
||||
fn()
|
||||
.on('end', function () { setTimeout(pop, 0); })
|
||||
.pipe(result, { end: false });
|
||||
}
|
||||
}
|
||||
|
||||
pop();
|
||||
return result;
|
||||
}
|
||||
|
||||
export function packageExtensionsStream(optsIn?: IPackageExtensionsOptions): NodeJS.ReadWriteStream {
|
||||
const opts = optsIn || {};
|
||||
|
||||
const localExtensionDescriptions = (<string[]>glob.sync('extensions/*/package.json'))
|
||||
.map(manifestPath => {
|
||||
const extensionPath = path.dirname(path.join(root, manifestPath));
|
||||
const extensionName = path.basename(extensionPath);
|
||||
return { name: extensionName, path: extensionPath };
|
||||
})
|
||||
.filter(({ name }) => excludedExtensions.indexOf(name) === -1)
|
||||
.filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
||||
.filter(({ name }) => builtInExtensions.every(b => b.name !== name))
|
||||
// {{SQL CARBON EDIT}}
|
||||
.filter(({ name }) => sqlBuiltInExtensions.indexOf(name) === -1);
|
||||
|
||||
const localExtensions = () => sequence([...localExtensionDescriptions.map(extension => () => {
|
||||
return fromLocal(extension.path, opts.sourceMappingURLBase)
|
||||
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
})]);
|
||||
|
||||
// {{SQL CARBON EDIT}}
|
||||
const extensionDepsSrc = [
|
||||
..._.flatten(extensionsProductionDependencies.map((d: any) => path.relative(root, d.path)).map((d: any) => [`${d}/**`, `!${d}/**/{test,tests}/**`])),
|
||||
];
|
||||
|
||||
const localExtensionDependencies = () => gulp.src(extensionDepsSrc, { base: '.', dot: true })
|
||||
.pipe(filter(['**', '!**/package-lock.json']));
|
||||
|
||||
// Original code commented out here
|
||||
// const localExtensionDependencies = () => gulp.src('extensions/node_modules/**', { base: '.' });
|
||||
|
||||
// const marketplaceExtensions = () => es.merge(
|
||||
// ...builtInExtensions
|
||||
// .filter(({ name }) => opts.desiredExtensions ? opts.desiredExtensions.indexOf(name) >= 0 : true)
|
||||
// .map(extension => {
|
||||
// return fromMarketplace(extension.name, extension.version, extension.metadata)
|
||||
// .pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
|
||||
// })
|
||||
// );
|
||||
|
||||
return sequence([localExtensions, localExtensionDependencies, /*marketplaceExtensions*/])
|
||||
.pipe(util2.setExecutableBit(['**/*.sh']))
|
||||
.pipe(filter(['**', '!**/*.js.map']));
|
||||
// {{SQL CARBON EDIT}} - End
|
||||
}
|
||||
|
||||
@@ -4,47 +4,47 @@
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
'use strict';
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
var path = require("path");
|
||||
var fs = require("fs");
|
||||
const path = require("path");
|
||||
const fs = require("fs");
|
||||
/**
|
||||
* Returns the sha1 commit version of a repository or undefined in case of failure.
|
||||
*/
|
||||
function getVersion(repo) {
|
||||
var git = path.join(repo, '.git');
|
||||
var headPath = path.join(git, 'HEAD');
|
||||
var head;
|
||||
const git = path.join(repo, '.git');
|
||||
const headPath = path.join(git, 'HEAD');
|
||||
let head;
|
||||
try {
|
||||
head = fs.readFileSync(headPath, 'utf8').trim();
|
||||
}
|
||||
catch (e) {
|
||||
return void 0;
|
||||
return undefined;
|
||||
}
|
||||
if (/^[0-9a-f]{40}$/i.test(head)) {
|
||||
return head;
|
||||
}
|
||||
var refMatch = /^ref: (.*)$/.exec(head);
|
||||
const refMatch = /^ref: (.*)$/.exec(head);
|
||||
if (!refMatch) {
|
||||
return void 0;
|
||||
return undefined;
|
||||
}
|
||||
var ref = refMatch[1];
|
||||
var refPath = path.join(git, ref);
|
||||
const ref = refMatch[1];
|
||||
const refPath = path.join(git, ref);
|
||||
try {
|
||||
return fs.readFileSync(refPath, 'utf8').trim();
|
||||
}
|
||||
catch (e) {
|
||||
// noop
|
||||
}
|
||||
var packedRefsPath = path.join(git, 'packed-refs');
|
||||
var refsRaw;
|
||||
const packedRefsPath = path.join(git, 'packed-refs');
|
||||
let refsRaw;
|
||||
try {
|
||||
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
|
||||
}
|
||||
catch (e) {
|
||||
return void 0;
|
||||
return undefined;
|
||||
}
|
||||
var refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
||||
var refsMatch;
|
||||
var refs = {};
|
||||
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
||||
let refsMatch;
|
||||
let refs = {};
|
||||
while (refsMatch = refsRegex.exec(refsRaw)) {
|
||||
refs[refsMatch[2]] = refsMatch[1];
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ import * as fs from 'fs';
|
||||
/**
|
||||
* Returns the sha1 commit version of a repository or undefined in case of failure.
|
||||
*/
|
||||
export function getVersion(repo: string): string {
|
||||
export function getVersion(repo: string): string | undefined {
|
||||
const git = path.join(repo, '.git');
|
||||
const headPath = path.join(git, 'HEAD');
|
||||
let head: string;
|
||||
@@ -18,7 +18,7 @@ export function getVersion(repo: string): string {
|
||||
try {
|
||||
head = fs.readFileSync(headPath, 'utf8').trim();
|
||||
} catch (e) {
|
||||
return void 0;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (/^[0-9a-f]{40}$/i.test(head)) {
|
||||
@@ -28,7 +28,7 @@ export function getVersion(repo: string): string {
|
||||
const refMatch = /^ref: (.*)$/.exec(head);
|
||||
|
||||
if (!refMatch) {
|
||||
return void 0;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const ref = refMatch[1];
|
||||
@@ -46,11 +46,11 @@ export function getVersion(repo: string): string {
|
||||
try {
|
||||
refsRaw = fs.readFileSync(packedRefsPath, 'utf8').trim();
|
||||
} catch (e) {
|
||||
return void 0;
|
||||
return undefined;
|
||||
}
|
||||
|
||||
const refsRegex = /^([0-9a-f]{40})\s+(.+)$/gm;
|
||||
let refsMatch: RegExpExecArray;
|
||||
let refsMatch: RegExpExecArray | null;
|
||||
let refs: { [ref: string]: string } = {};
|
||||
|
||||
while (refsMatch = refsRegex.exec(refsRaw)) {
|
||||
|
||||
@@ -27,135 +27,159 @@
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/cli",
+        "name": "vs/workbench/api/common",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/codeEditor",
+        "name": "vs/workbench/contrib/cli",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/comments",
+        "name": "vs/workbench/contrib/codeEditor",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/debug",
+        "name": "vs/workbench/contrib/codeinset",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/emmet",
+        "name": "vs/workbench/contrib/callHierarchy",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/execution",
+        "name": "vs/workbench/contrib/comments",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/extensions",
+        "name": "vs/workbench/contrib/debug",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/feedback",
+        "name": "vs/workbench/contrib/emmet",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/files",
+        "name": "vs/workbench/contrib/extensions",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/html",
+        "name": "vs/workbench/contrib/externalTerminal",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/markers",
+        "name": "vs/workbench/contrib/feedback",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/localizations",
+        "name": "vs/workbench/contrib/files",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/logs",
+        "name": "vs/workbench/contrib/html",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/navigation",
+        "name": "vs/workbench/contrib/issue",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/output",
+        "name": "vs/workbench/contrib/markers",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/performance",
+        "name": "vs/workbench/contrib/localizations",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/preferences",
+        "name": "vs/workbench/contrib/logs",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/quickopen",
+        "name": "vs/workbench/contrib/output",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/relauncher",
+        "name": "vs/workbench/contrib/performance",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/scm",
+        "name": "vs/workbench/contrib/preferences",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/search",
+        "name": "vs/workbench/contrib/quickopen",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/snippets",
+        "name": "vs/workbench/contrib/remote",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/surveys",
+        "name": "vs/workbench/contrib/relauncher",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/tasks",
+        "name": "vs/workbench/contrib/scm",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/terminal",
+        "name": "vs/workbench/contrib/search",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/themes",
+        "name": "vs/workbench/contrib/snippets",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/trust",
+        "name": "vs/workbench/contrib/format",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/update",
+        "name": "vs/workbench/contrib/stats",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/url",
+        "name": "vs/workbench/contrib/surveys",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/watermark",
+        "name": "vs/workbench/contrib/tasks",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/webview",
+        "name": "vs/workbench/contrib/terminal",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/welcome",
+        "name": "vs/workbench/contrib/themes",
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/parts/outline",
+        "name": "vs/workbench/contrib/trust",
         "project": "vscode-workbench"
     },
     {
+        "name": "vs/workbench/contrib/update",
+        "project": "vscode-workbench"
+    },
+    {
+        "name": "vs/workbench/contrib/url",
+        "project": "vscode-workbench"
+    },
+    {
+        "name": "vs/workbench/contrib/watermark",
+        "project": "vscode-workbench"
+    },
+    {
+        "name": "vs/workbench/contrib/webview",
+        "project": "vscode-workbench"
+    },
+    {
+        "name": "vs/workbench/contrib/welcome",
+        "project": "vscode-workbench"
+    },
+    {
+        "name": "vs/workbench/contrib/outline",
+        "project": "vscode-workbench"
+    },
+    {
@@ -166,6 +190,10 @@
         "name": "vs/workbench/services/bulkEdit",
         "project": "vscode-workbench"
     },
+    {
+        "name": "vs/workbench/services/commands",
+        "project": "vscode-workbench"
+    },
     {
         "name": "vs/workbench/services/configuration",
         "project": "vscode-workbench"
@@ -191,13 +219,17 @@
         "project": "vscode-workbench"
     },
     {
-        "name": "vs/workbench/services/jsonschemas",
+        "name": "vs/workbench/services/extensionManagement",
         "project": "vscode-workbench"
     },
+    {
+        "name": "vs/workbench/services/files",
+        "project": "vscode-workbench"
+    },
     {
         "name": "vs/workbench/services/integrity",
         "project": "vscode-workbench"
     },
     {
         "name": "vs/workbench/services/keybinding",
         "project": "vscode-workbench"
@@ -210,6 +242,10 @@
         "name": "vs/workbench/services/progress",
         "project": "vscode-workbench"
     },
+    {
+        "name": "vs/workbench/services/remote",
+        "project": "vscode-workbench"
+    },
     {
         "name": "vs/workbench/services/textfile",
         "project": "vscode-workbench"
@@ -230,6 +266,10 @@
         "name": "vs/workbench/services/decorations",
         "project": "vscode-workbench"
     },
+    {
+        "name": "vs/workbench/services/label",
+        "project": "vscode-workbench"
+    },
     {
         "name": "vs/workbench/services/preferences",
         "project": "vscode-preferences"
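These entries are consumed by the localization pipeline as `{ name, project }` pairs; later in this diff, findObsoleteResources flattens each one into a `<project>/<slug>` identifier by replacing slashes with underscores. A hedged sketch of that mapping over the renamed entries (the variable names are illustrative):

// Sketch only: shows how the renamed workbench entries above are flattened into
// resource identifiers (the same mapping appears in findObsoleteResources below).
import * as fs from 'fs';

interface Resource {
    name: string;
    project: string;
}

const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
const identifiers: string[] = [...json.editor, ...json.workbench]
    .map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));

// e.g. { name: 'vs/workbench/contrib/files', project: 'vscode-workbench' }
//      becomes 'vscode-workbench/vs_workbench_contrib_files'
console.log(identifiers.length, 'resources');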
@@ -7,25 +7,25 @@ import * as path from 'path';
 import * as fs from 'fs';

 import { through, readable, ThroughStream } from 'event-stream';
-import File = require('vinyl');
+import * as File from 'vinyl';
 import * as Is from 'is';
 import * as xml2js from 'xml2js';
 import * as glob from 'glob';
 import * as https from 'https';
 import * as gulp from 'gulp';

-var util = require('gulp-util');
-var iconv = require('iconv-lite');
+import * as fancyLog from 'fancy-log';
+import * as ansiColors from 'ansi-colors';
+import * as iconv from 'iconv-lite';

 const NUMBER_OF_CONCURRENT_DOWNLOADS = 4;

 function log(message: any, ...rest: any[]): void {
-    util.log(util.colors.green('[i18n]'), message, ...rest);
+    fancyLog(ansiColors.green('[i18n]'), message, ...rest);
 }

 export interface Language {
-    id: string; // laguage id, e.g. zh-tw, de
-    transifexId?: string; // language id used in transifex, e.g zh-hant, de (optional, if not set, the id is used)
+    id: string; // language id, e.g. zh-tw, de
+    translationId?: string; // language id used in translation tools, e.g zh-hant, de (optional, if not set, the id is used)
     folderName?: string; // language specific folder name, e.g. cht, deu (optional, if not set, the id is used)
 }

@@ -38,8 +38,8 @@ export interface InnoSetup {
 }

 export const defaultLanguages: Language[] = [
-    { id: 'zh-tw', folderName: 'cht', transifexId: 'zh-hant' },
-    { id: 'zh-cn', folderName: 'chs', transifexId: 'zh-hans' },
+    { id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' },
+    { id: 'zh-cn', folderName: 'chs', translationId: 'zh-hans' },
     { id: 'ja', folderName: 'jpn' },
     { id: 'ko', folderName: 'kor' },
     { id: 'de', folderName: 'deu' },
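For orientation, a small hedged sketch of how the renamed fields are typically resolved downstream; both fallbacks appear later in this diff (`language.translationId || language.id` and `language.folderName || language.id`), while the helper name here is invented for illustration.

// Illustration only; resolveLanguagePaths is a hypothetical helper, not part of i18n.ts.
export interface Language {
    id: string;              // language id, e.g. zh-tw, de
    translationId?: string;  // id used by the translation tooling, falls back to id
    folderName?: string;     // language specific folder name, falls back to id
}

function resolveLanguagePaths(language: Language): { translationId: string; folderName: string } {
    return {
        translationId: language.translationId || language.id,
        folderName: language.folderName || language.id
    };
}

// Example: { id: 'zh-tw', folderName: 'cht', translationId: 'zh-hant' }
// resolves to { translationId: 'zh-hant', folderName: 'cht' }.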
@@ -57,7 +57,7 @@ export const extraLanguages: Language[] = [
 ];

 // non built-in extensions also that are transifex and need to be part of the language packs
-const externalExtensionsWithTranslations = {
+export const externalExtensionsWithTranslations = {
     'vscode-chrome-debug': 'msjsdiag.debugger-for-chrome',
     'vscode-node-debug': 'ms-vscode.node-debug',
     'vscode-node-debug2': 'ms-vscode.node-debug2'
@@ -71,7 +71,7 @@ interface Map<V> {
 interface Item {
     id: string;
     message: string;
-    comment: string;
+    comment?: string;
 }

 export interface Resource {
@@ -137,27 +137,6 @@ module PackageJsonFormat {
     }
 }

-interface ModuleJsonFormat {
-    messages: string[];
-    keys: (string | LocalizeInfo)[];
-}
-
-module ModuleJsonFormat {
-    export function is(value: any): value is ModuleJsonFormat {
-        let candidate = value as ModuleJsonFormat;
-        return Is.defined(candidate)
-            && Is.array(candidate.messages) && candidate.messages.every(message => Is.string(message))
-            && Is.array(candidate.keys) && candidate.keys.every(key => Is.string(key) || LocalizeInfo.is(key));
-    }
-}
-
-interface BundledExtensionHeaderFormat {
-    id: string;
-    type: string;
-    hash: string;
-    outDir: string;
-}
-
 interface BundledExtensionFormat {
     [key: string]: {
         messages: string[];
@@ -165,10 +144,19 @@ interface BundledExtensionFormat {
     };
 }

+interface I18nFormat {
+    version: string;
+    contents: {
+        [module: string]: {
+            [messageKey: string]: string;
+        };
+    };
+}
+
 export class Line {
     private buffer: string[] = [];

-    constructor(private indent: number = 0) {
+    constructor(indent: number = 0) {
         if (indent > 0) {
             this.buffer.push(new Array(indent + 1).join(' '));
         }
@@ -235,8 +223,8 @@ export class XLF {
         let existingKeys = new Set<string>();
         for (let i = 0; i < keys.length; i++) {
             let key = keys[i];
-            let realKey: string;
-            let comment: string;
+            let realKey: string | undefined;
+            let comment: string | undefined;
             if (Is.string(key)) {
                 realKey = key;
                 comment = undefined;
@@ -286,17 +274,17 @@ export class XLF {
     }

     static parsePseudo = function (xlfString: string): Promise<ParsedXLF[]> {
-        return new Promise((resolve, reject) => {
+        return new Promise((resolve) => {
             let parser = new xml2js.Parser();
             let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];
-            parser.parseString(xlfString, function (err, result) {
+            parser.parseString(xlfString, function (_err: any, result: any) {
                 const fileNodes: any[] = result['xliff']['file'];
                 fileNodes.forEach(file => {
                     const originalFilePath = file.$.original;
                     const messages: Map<string> = {};
                     const transUnits = file.body[0]['trans-unit'];
                     if (transUnits) {
-                        transUnits.forEach(unit => {
+                        transUnits.forEach((unit: any) => {
                             const key = unit.$.id;
                             const val = pseudify(unit.source[0]['_'].toString());
                             if (key && val) {
@@ -317,7 +305,7 @@ export class XLF {

             let files: { messages: Map<string>, originalFilePath: string, language: string }[] = [];

-            parser.parseString(xlfString, function (err, result) {
+            parser.parseString(xlfString, function (err: any, result: any) {
                 if (err) {
                     reject(new Error(`XLF parsing error: Failed to parse XLIFF string. ${err}`));
                 }
@@ -340,17 +328,20 @@ export class XLF {

                 const transUnits = file.body[0]['trans-unit'];
                 if (transUnits) {
-                    transUnits.forEach(unit => {
+                    transUnits.forEach((unit: any) => {
                         const key = unit.$.id;
                         if (!unit.target) {
                             return; // No translation available
                         }

-                        const val = unit.target.toString();
+                        let val = unit.target[0];
+                        if (typeof val !== 'string') {
+                            val = val._;
+                        }
                         if (key && val) {
                             messages[key] = decodeEntities(val);
                         } else {
-                            reject(new Error(`XLF parsing error: XLIFF file does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
+                            reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
                         }
                     });
                     files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
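The new `unit.target[0]` handling accounts for xml2js returning either a plain string or an object whose text sits under `_` when the element carries attributes. A hedged, standalone sketch of just that extraction step; the helper name is invented:

// Sketch of the target-extraction logic added above; extractTargetText is a
// hypothetical helper, shown only to isolate the string-vs-object case.
function extractTargetText(unit: any): string | undefined {
    if (!unit.target) {
        return undefined; // no translation available
    }
    let val = unit.target[0];
    if (typeof val !== 'string') {
        // xml2js wraps element text in `_` when the element has attributes,
        // e.g. <target state="translated">text</target>
        val = val._;
    }
    return val;
}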
@@ -369,7 +360,7 @@ export interface ITask<T> {

 interface ILimitedTaskFactory<T> {
     factory: ITask<Promise<T>>;
-    c: (value?: T | Thenable<T>) => void;
+    c: (value?: T | Promise<T>) => void;
     e: (error?: any) => void;
 }

@@ -391,7 +382,7 @@ export class Limiter<T> {

     private consume(): void {
         while (this.outstandingPromises.length && this.runningPromises < this.maxDegreeOfParalellism) {
-            const iLimitedTask = this.outstandingPromises.shift();
+            const iLimitedTask = this.outstandingPromises.shift()!;
             this.runningPromises++;

             const promise = iLimitedTask.factory();
@@ -419,8 +410,8 @@ function stripComments(content: string): string {
      * Third matches block comments
      * Fourth matches line comments
      */
-    var regexp: RegExp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
-    let result = content.replace(regexp, (match, m1, m2, m3, m4) => {
+    const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
+    let result = content.replace(regexp, (match, _m1, _m2, m3, m4) => {
        // Only one of m1, m2, m3, m4 matches
        if (m3) {
            // A block comment. Replace with nothing
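A hedged, trimmed-down sketch of the comment-stripping idea, reusing the regex from the hunk above. Only the block-comment branch is visible in the diff; the string-literal and line-comment handling below are assumptions, and the full function in i18n.ts does more than this.

// Sketch only: a simplified variant built around the regex shown above.
function stripCommentsSketch(content: string): string {
    const regexp = /("(?:[^\\\"]*(?:\\.)?)*")|('(?:[^\\\']*(?:\\.)?)*')|(\/\*(?:\r?\n|.)*?\*\/)|(\/{2,}.*?(?:(?:\r?\n)|$))/g;
    return content.replace(regexp, (match, _m1, _m2, m3, m4) => {
        // Only one of the capture groups matches per hit.
        if (m3) {
            return ''; // block comment: replace with nothing
        }
        if (m4) {
            // line comment: keep the trailing line break so line numbers survive (assumption)
            return m4.endsWith('\r\n') ? '\r\n' : (m4.endsWith('\n') ? '\n' : '');
        }
        return match; // quoted string literal: keep as-is
    });
}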
@@ -442,9 +433,9 @@ function stripComments(content: string): string {
 }

 function escapeCharacters(value: string): string {
-    var result: string[] = [];
-    for (var i = 0; i < value.length; i++) {
-        var ch = value.charAt(i);
+    const result: string[] = [];
+    for (let i = 0; i < value.length; i++) {
+        const ch = value.charAt(i);
         switch (ch) {
             case '\'':
                 result.push('\\\'');
@@ -484,7 +475,6 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json

     let statistics: Map<number> = Object.create(null);

-    let total: number = 0;
     let defaultMessages: Map<Map<string>> = Object.create(null);
     let modules = Object.keys(keysSection);
     modules.forEach((module) => {
@@ -497,7 +487,6 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
         let messageMap: Map<string> = Object.create(null);
         defaultMessages[module] = messageMap;
         keys.map((key, i) => {
-            total++;
             if (typeof key === 'string') {
                 messageMap[key] = messages[i];
             } else {
@@ -506,7 +495,11 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json
         });
     });

-    let languageDirectory = path.join(__dirname, '..', '..', 'i18n');
+    let languageDirectory = path.join(__dirname, '..', '..', '..', 'vscode-loc', 'i18n');
+    if (!fs.existsSync(languageDirectory)) {
+        log(`No VS Code localization repository found. Looking at ${languageDirectory}`);
+        log(`To bundle translations please check out the vscode-loc repository as a sibling of the vscode repository.`);
+    }
     let sortedLanguages = sortLanguages(languages);
     sortedLanguages.forEach((language) => {
         if (process.env['VSCODE_BUILD_VERBOSE']) {
@@ -515,31 +508,35 @@ function processCoreBundleFormat(fileHeader: string, languages: Language[], json

         statistics[language.id] = 0;
         let localizedModules: Map<string[]> = Object.create(null);
-        let languageFolderName = language.folderName || language.id;
-        let cwd = path.join(languageDirectory, languageFolderName, 'src');
-        modules.forEach((module) => {
-            let order = keysSection[module];
-            let i18nFile = path.join(cwd, module) + '.i18n.json';
-            let messages: Map<string> = null;
+        let languageFolderName = language.translationId || language.id;
+        let i18nFile = path.join(languageDirectory, `vscode-language-pack-${languageFolderName}`, 'translations', 'main.i18n.json');
+        let allMessages: I18nFormat | undefined;
         if (fs.existsSync(i18nFile)) {
             let content = stripComments(fs.readFileSync(i18nFile, 'utf8'));
-            messages = JSON.parse(content);
-        } else {
+            allMessages = JSON.parse(content);
+        }
+        modules.forEach((module) => {
+            let order = keysSection[module];
+            let moduleMessage: { [messageKey: string]: string } | undefined;
+            if (allMessages) {
+                moduleMessage = allMessages.contents[module];
+            }
+            if (!moduleMessage) {
                 if (process.env['VSCODE_BUILD_VERBOSE']) {
                     log(`No localized messages found for module ${module}. Using default messages.`);
                 }
-                messages = defaultMessages[module];
-                statistics[language.id] = statistics[language.id] + Object.keys(messages).length;
+                moduleMessage = defaultMessages[module];
+                statistics[language.id] = statistics[language.id] + Object.keys(moduleMessage).length;
             }
             let localizedMessages: string[] = [];
             order.forEach((keyInfo) => {
-                let key: string = null;
+                let key: string | null = null;
                 if (typeof keyInfo === 'string') {
                     key = keyInfo;
                 } else {
                     key = keyInfo.key;
                 }
-                let message: string = messages[key];
+                let message: string = moduleMessage![key];
                 if (!message) {
                     if (process.env['VSCODE_BUILD_VERBOSE']) {
                         log(`No localized message found for key ${key} in module ${module}. Using default message.`);
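The rewritten block above resolves per-module messages out of a single language-pack file instead of per-module i18n files. A hedged sketch of that lookup, using the I18nFormat shape introduced earlier in this diff; the function name and the fallback argument are illustrative only, and the real code also strips comments before parsing.

// Sketch only: loadModuleMessages is a hypothetical helper mirroring the lookup above
// (language pack first, English defaults as fallback).
import * as path from 'path';
import * as fs from 'fs';

interface I18nFormat {
    version: string;
    contents: {
        [module: string]: {
            [messageKey: string]: string;
        };
    };
}

function loadModuleMessages(
    languageDirectory: string,
    translationId: string,
    module: string,
    defaultMessages: { [key: string]: string }
): { [key: string]: string } {
    const i18nFile = path.join(
        languageDirectory,
        `vscode-language-pack-${translationId}`,
        'translations',
        'main.i18n.json'
    );
    let allMessages: I18nFormat | undefined;
    if (fs.existsSync(i18nFile)) {
        allMessages = JSON.parse(fs.readFileSync(i18nFile, 'utf8'));
    }
    // Fall back to the default messages when the pack has no entry for this module.
    return (allMessages && allMessages.contents[module]) || defaultMessages;
}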
@@ -625,7 +622,7 @@ export function getResource(sourceFile: string): Resource {
         return { name: 'vs/base', project: editorProject };
     } else if (/^vs\/code/.test(sourceFile)) {
         return { name: 'vs/code', project: workbenchProject };
-    } else if (/^vs\/workbench\/parts/.test(sourceFile)) {
+    } else if (/^vs\/workbench\/contrib/.test(sourceFile)) {
         resource = sourceFile.split('/', 4).join('/');
         return { name: resource, project: workbenchProject };
    } else if (/^vs\/workbench\/services/.test(sourceFile)) {
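The effect of this branch change is that workbench source paths under the new contrib layout map to a four-segment resource name. A hedged, self-contained sketch that isolates just this mapping (not the full getResource):

// Sketch only: workbenchProject mirrors the 'vscode-workbench' project used in i18n.ts.
const workbenchProject = 'vscode-workbench';

interface Resource {
    name: string;
    project: string;
}

function contribResource(sourceFile: string): Resource | undefined {
    if (/^vs\/workbench\/contrib/.test(sourceFile)) {
        // keep the first four path segments, e.g.
        // 'vs/workbench/contrib/files/browser/foo' becomes 'vs/workbench/contrib/files'
        const resource = sourceFile.split('/', 4).join('/');
        return { name: resource, project: workbenchProject };
    }
    return undefined;
}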
@@ -712,7 +709,7 @@ export function createXlfFilesForExtensions(): ThroughStream {
             }
             return _xlf;
         }
-        gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`]).pipe(through(function (file: File) {
+        gulp.src([`./extensions/${extensionName}/package.nls.json`, `./extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(through(function (file: File) {
             if (file.isBuffer()) {
                 const buffer: Buffer = file.contents as Buffer;
                 const basename = path.basename(file.path);
@@ -824,8 +821,8 @@ export function createXlfFilesForIsl(): ThroughStream {
 }

 export function pushXlfFiles(apiHostname: string, username: string, password: string): ThroughStream {
-    let tryGetPromises = [];
-    let updateCreatePromises = [];
+    let tryGetPromises: Array<Promise<boolean>> = [];
+    let updateCreatePromises: Array<Promise<boolean>> = [];

     return through(function (this: ThroughStream, file: File) {
         const project = path.dirname(file.relative);
@@ -890,7 +887,7 @@ function getAllResources(project: string, apiHostname: string, username: string,

 export function findObsoleteResources(apiHostname: string, username: string, password: string): ThroughStream {
     let resourcesByProject: Map<string[]> = Object.create(null);
-    resourcesByProject[extensionsProject] = [].concat(externalExtensionsWithTranslations); // clone
+    resourcesByProject[extensionsProject] = ([] as any[]).concat(externalExtensionsWithTranslations); // clone

     return through(function (this: ThroughStream, file: File) {
         const project = path.dirname(file.relative);
@@ -907,7 +904,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas

     const json = JSON.parse(fs.readFileSync('./build/lib/i18n.resources.json', 'utf8'));
     let i18Resources = [...json.editor, ...json.workbench].map((r: Resource) => r.project + '/' + r.name.replace(/\//g, '_'));
-    let extractedResources = [];
+    let extractedResources: string[] = [];
     for (let project of [workbenchProject, editorProject]) {
         for (let resource of resourcesByProject[project]) {
             if (resource !== 'setup_messages') {
@@ -920,7 +917,7 @@ export function findObsoleteResources(apiHostname: string, username: string, pas
         console.log(`[i18n] Missing resources in file 'build/lib/i18n.resources.json': JSON.stringify(${extractedResources.filter(p => i18Resources.indexOf(p) === -1)})`);
     }

-    let promises = [];
+    let promises: Array<Promise<void>> = [];
     for (let project in resourcesByProject) {
         promises.push(
             getAllResources(project, apiHostname, username, password).then(resources => {
@@ -965,7 +962,7 @@ function tryGetResource(project: string, slug: string, apiHostname: string, cred
 }

 function createResource(project: string, slug: string, xlfFile: File, apiHostname: string, credentials: any): Promise<any> {
-    return new Promise((resolve, reject) => {
+    return new Promise((_resolve, reject) => {
         const data = JSON.stringify({
             'content': xlfFile.contents.toString(),
             'name': slug,
@@ -1056,8 +1053,8 @@ export function pullCoreAndExtensionsXlfFiles(apiHostname: string, username: str

     // extensions
     let extensionsToLocalize = Object.create(null);
-    glob.sync('./extensions/**/*.nls.json', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
-    glob.sync('./extensions/*/node_modules/vscode-nls', ).forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
+    glob.sync('./extensions/**/*.nls.json').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);
+    glob.sync('./extensions/*/node_modules/vscode-nls').forEach(extension => extensionsToLocalize[extension.split('/')[2]] = true);

     Object.keys(extensionsToLocalize).forEach(extension => {
         _coreAndExtensionResources.push({ name: extension, project: extensionsProject });
@@ -1085,7 +1082,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
     let expectedTranslationsCount = resources.length;
     let translationsRetrieved = 0, called = false;

-    return readable(function (count, callback) {
+    return readable(function (_count: any, callback: any) {
         // Mark end of stream when all resources were retrieved
         if (translationsRetrieved === expectedTranslationsCount) {
             return this.emit('end');
@@ -1095,7 +1092,7 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
         called = true;
         const stream = this;
         resources.map(function (resource) {
-            retrieveResource(language, resource, apiHostname, credentials).then((file: File) => {
+            retrieveResource(language, resource, apiHostname, credentials).then((file: File | null) => {
                 if (file) {
                     stream.emit('data', file);
                 }
@@ -1107,13 +1104,13 @@ function pullXlfFiles(apiHostname: string, username: string, password: string, l
             callback();
         });
     }
-const limiter = new Limiter<File>(NUMBER_OF_CONCURRENT_DOWNLOADS);
+const limiter = new Limiter<File | null>(NUMBER_OF_CONCURRENT_DOWNLOADS);

-function retrieveResource(language: Language, resource: Resource, apiHostname, credentials): Promise<File> {
-    return limiter.queue(() => new Promise<File>((resolve, reject) => {
+function retrieveResource(language: Language, resource: Resource, apiHostname: string, credentials: string): Promise<File | null> {
+    return limiter.queue(() => new Promise<File | null>((resolve, reject) => {
         const slug = resource.name.replace(/\//g, '_');
         const project = resource.project;
-        let transifexLanguageId = language.id === 'ps' ? 'en' : language.transifexId || language.id;
+        let transifexLanguageId = language.id === 'ps' ? 'en' : language.translationId || language.id;
         const options = {
             hostname: apiHostname,
             path: `/api/2/project/${project}/resource/${slug}/translation/${transifexLanguageId}?file&mode=onlyreviewed`,
@@ -1212,10 +1209,10 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
     let parsePromises: Promise<ParsedXLF[]>[] = [];
     let mainPack: I18nPack = { version: i18nPackVersion, contents: {} };
     let extensionsPacks: Map<I18nPack> = {};
+    let errors: any[] = [];
     return through(function (this: ThroughStream, xlf: File) {
         let stream = this;
-        let project = path.dirname(xlf.path);
-        let resource = path.basename(xlf.path, '.xlf');
+        let project = path.basename(path.dirname(xlf.relative));
+        let resource = path.basename(xlf.relative, '.xlf');
         let contents = xlf.contents.toString();
         let parsePromise = pseudo ? XLF.parsePseudo(contents) : XLF.parse(contents);
         parsePromises.push(parsePromise);
@@ -1242,10 +1239,15 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
                 }
             });
         }
-        );
+        ).catch(reason => {
+            errors.push(reason);
+        });
     }, function () {
         Promise.all(parsePromises)
             .then(() => {
+                if (errors.length > 0) {
+                    throw errors;
+                }
                 const translatedMainFile = createI18nFile('./main', mainPack);
                 resultingTranslationPaths.push({ id: 'vscode', resourceName: 'main.i18n.json' });

@@ -1264,7 +1266,9 @@ export function prepareI18nPackFiles(externalExtensions: Map<string>, resultingT
             }
             this.queue(null);
         })
-            .catch(reason => { throw new Error(reason); });
+            .catch((reason) => {
+                this.emit('error', reason);
+            });
     });
 }

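Both flush handlers now surface parse failures through the stream instead of throwing inside a promise callback. A hedged sketch of that pattern with event-stream's through(); the parse step here is a stand-in, not the real XLF.parse/createIslFile work.

// Sketch only: mirrors the emit('error', ...) pattern adopted above.
import { through, ThroughStream } from 'event-stream';

function collectAndEmitErrors(parseChunk: (data: any) => Promise<void>): ThroughStream {
    const parsePromises: Promise<void>[] = [];
    return through(function (this: ThroughStream, data: any) {
        parsePromises.push(parseChunk(data).catch(reason => {
            // do not throw inside the promise chain; surface it on the stream instead
            this.emit('error', reason);
        }));
    }, function (this: ThroughStream) {
        Promise.all(parsePromises)
            .then(() => this.queue(null))
            .catch(reason => this.emit('error', reason));
    });
}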
@@ -1285,11 +1289,15 @@ export function prepareIslFiles(language: Language, innoSetupConfig: InnoSetup):
                 stream.queue(translatedFile);
             });
         }
-        );
+        ).catch(reason => {
+            this.emit('error', reason);
+        });
     }, function () {
         Promise.all(parsePromises)
             .then(() => { this.queue(null); })
-            .catch(reason => { throw new Error(reason); });
+            .catch(reason => {
+                this.emit('error', reason);
+            });
     });
 }

@@ -1306,7 +1314,7 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
         let firstChar = line.charAt(0);
         if (firstChar === '[' || firstChar === ';') {
             if (line === '; *** Inno Setup version 5.5.3+ English messages ***') {
-                content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo.name} messages ***`);
+                content.push(`; *** Inno Setup version 5.5.3+ ${innoSetup.defaultInfo!.name} messages ***`);
             } else {
                 content.push(line);
             }
@@ -1316,9 +1324,9 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language
             let translated = line;
             if (key) {
                 if (key === 'LanguageName') {
-                    translated = `${key}=${innoSetup.defaultInfo.name}`;
+                    translated = `${key}=${innoSetup.defaultInfo!.name}`;
                 } else if (key === 'LanguageID') {
-                    translated = `${key}=${innoSetup.defaultInfo.id}`;
+                    translated = `${key}=${innoSetup.defaultInfo!.id}`;
                 } else if (key === 'LanguageCodePage') {
                     translated = `${key}=${innoSetup.codePage.substr(2)}`;
                 } else {
@@ -1339,14 +1347,14 @@ function createIslFile(originalFilePath: string, messages: Map<string>, language

     return new File({
         path: filePath,
-        contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8'), innoSetup.codePage)
+        contents: iconv.encode(Buffer.from(content.join('\r\n'), 'utf8').toString(), innoSetup.codePage)
     });
 }

 function encodeEntities(value: string): string {
-    var result: string[] = [];
-    for (var i = 0; i < value.length; i++) {
-        var ch = value[i];
+    let result: string[] = [];
+    for (let i = 0; i < value.length; i++) {
+        let ch = value[i];
         switch (ch) {
             case '<':
                 result.push('&lt;');
Some files were not shown because too many files have changed in this diff