Compare commits


335 Commits

Author SHA1 Message Date
Chris LaFreniere
0f5cfdc2c8 Handle key_up in callout dialog (#15472) (#15485) 2021-05-14 12:04:15 -07:00
Charles Gagnon
1d6427ce89 Fix Issue Reporter (#15467) (#15475)
(cherry picked from commit d797ef433a)
2021-05-13 21:44:28 -07:00
Alan Ren
4356e49793 fix animated svg issue (#15471) (#15473) 2021-05-13 20:13:56 -07:00
Hai Cao
12662f7427 Fix link handling in viewPane (#15458) (#15468) 2021-05-13 15:26:01 -07:00
Aditya Bist
6e1f995286 fix azure context menu (#15447) (#15462)
* fix azure context menu

* fix node issue

* change to undefined

* just use interface

* set connection profile

* add save profile
2021-05-13 14:13:32 -07:00
Aditya Bist
c5508cf46a Fix cloud shell (#15452) (#15461)
* fix cloud shell

* better logic for dimensions

* fix dimension type

* change terminate to close
2021-05-13 14:12:08 -07:00
Udeesha Gautam
3f10d89db3 Doing a point fix for backup since select box has impact on other features (#15415) (#15456) 2021-05-13 12:45:24 -07:00
Barbara Valdez
419c2324c9 Notebooks View: Do not recenter view when selecting a notebook that is visible (#15442) (#15455)
* do not focus element when tree item is visible

* reset reveal behavior

* add comment
2021-05-13 10:46:46 -07:00
Benjin Dubishar
166faccf1e added missing await, removed duplicate okay button checks (#15450) (#15454) 2021-05-13 10:46:15 -07:00
Chris LaFreniere
e5f9790ed1 Notebooks: Fix Table Generation into Pure Markdown When No thead Exists (#15423) (#15448)
* works without alignment

* Alignment working

* Add comment

* Remove outdated comment
2021-05-12 17:57:02 -07:00
Charles Gagnon
e7fbedfe65 Fix dashboard icons (#15400) (#15431)
* Fix dashboard icons

* Split out custom SQL menu items

* Put on same line
2021-05-12 14:08:04 -07:00
Charles Gagnon
e291a25104 Fix event prefix (#15429) (#15433) 2021-05-12 13:50:37 -07:00
Lucy Zhang
4d3787dbc0 skip text cell smoketest (#15411) (#15417) 2021-05-12 10:23:11 -07:00
Aditya Bist
56cc1202d5 update remote yarn lock (#15419) (#15424)
* update yarn lock to use 2.9.4 when ^2.6.0 is used

* update main yarn lock

* update yarn lock for remote/web
2021-05-11 20:07:49 -07:00
Alan Ren
e3b238c017 remove the filters not applicable to ads (#15409) (#15418) 2021-05-11 16:27:21 -07:00
Aasim Khan
ed5bb14b11 Adding all the arm libraries as external dependencies in azurecore webpack (#15398) (#15399) 2021-05-11 13:41:38 -07:00
Alan Ren
7dca953660 fix line number display issue (#15368) 2021-05-07 18:57:22 -07:00
Kim Santiago
9d8660749a fix schema compare display name having username (#15357)
* fix schema compare display name having username

* reuse variable

* fix previous selection for server not being selected in dialog

* fix tests
2021-05-07 12:46:06 -07:00
Alan Ren
24f29ca0a7 bring in latest sqltoolsservice (#15366) 2021-05-07 11:24:41 -07:00
Maddy
1af8fdbb45 fix attach image on rehydration (#15359) 2021-05-06 17:13:00 -07:00
Lucy Zhang
4bc87c46a6 Fix notebook icons (#15346)
* remove width and height from nb svgs

* fix textcell toolbar icons
2021-05-06 15:58:59 -07:00
Lucy Zhang
2a354aef09 basic text cell functionality smoke test (#15195) 2021-05-06 15:53:20 -07:00
Vasu Bhog
cb06ca07c0 Fixes the empty parameter cell and multi parameters on one line (#15234)
* fix two bugs

* work on empty parameter cell and format properly

* add empty cell tests
2021-05-06 14:48:38 -07:00
Aasim Khan
18b2524528 Updating axios package to fix component governance issue. (#15339)
* Updating axios package and related libraries.

* Adding node fetch due to a test failure
2021-05-06 11:48:36 -07:00
Karl Burtram
9405795d4a Bump SQL Tools Service to 3.0.0-release.101 (#15344) 2021-05-06 10:44:19 -07:00
Sakshi Sharma
3b3df2c3fc Fixes for sql db projects dashboard (#15100)
* Fix ID assignment for sql db projects dashboard

* Update with new fixes

* Fix tests

* Fix test
2021-05-05 19:00:22 -07:00
Sakshi Sharma
8c08c79d67 Remove horizontal scrollbar (#15338) 2021-05-05 17:13:40 -07:00
Christopher Suh
b13dc0cd03 fixed account icon loading... issue (#15335) 2021-05-05 17:10:44 -07:00
Alan Ren
745c17da0a remove extension from recommended list (#15327) 2021-05-04 16:53:17 -07:00
Maddy
bd5b9b36c6 add tests for cell attachment changes (#15325) 2021-05-04 14:53:04 -07:00
Udeesha Gautam
bd4ebe65cc Reverting a change to ensure immediate notebook fix (#15326) 2021-05-04 14:12:53 -07:00
Charles Gagnon
0fa0431707 Add rollup dependency (#15317) 2021-05-04 09:08:36 -07:00
Charles Gagnon
726ad26a75 Trim spaces from arc connect controller dialog (#15321) 2021-05-04 08:38:06 -07:00
Alan Ren
0e7bd8a939 add ADS checks back to install extension workflow (#15320) 2021-05-03 20:27:47 -07:00
Kim Santiago
e2537922b0 bump sql database projects version to 0.9.1 (#15314) 2021-05-03 16:22:11 -07:00
Charles Gagnon
cb8f9d9d90 Use official Microsoft ads-kerberos (#15316)
* Use @microsoft/ads-kerberos

* Bundle ads-kerberos
2021-05-03 16:18:11 -07:00
Chris LaFreniere
027e8a6f33 Enable run unit tests current file and cleanup launch.json (#15307) 2021-05-03 11:38:43 -07:00
Justin M
f7ce597606 Bumped sqltoolservice version to 99 for Kusto. Bumped Kusto version to 0.5.4 (#15309) 2021-05-03 10:49:53 -07:00
Alan Ren
088d6353f0 keep title up to date when connection changes (#15306) 2021-04-30 22:41:55 -07:00
nasc17
7a726e5dfa Fix for PG parameters table disappearing when trying to reset a value (#15303)
* Working on table bug

* Removed unit test checking that the onEngineSettingsUpdated event fires, since it was removed from the model

* Moved instanceofcheckbox function to utils, pr fixes

* Fix comment
2021-04-30 20:50:39 -07:00
Maddy
ef8b26b7ae Insert local/online images using image call out (#15238)
* changes from Chris's branch and cell model updates

* get base64 value

* handle spaces in image names

* add comments

* add tests for imageCallOut dialog

* format document for hygiene errors

* address comments

* check base64 validity using regex

* replace space with regex

* add parameter and return type

* split into two functions

* move functions to fileUtilities

* correct import

* fix for layering issue

* revert file function changes
2021-04-30 19:43:55 -07:00
Charles Gagnon
587237673b Remove settings sync output channel (#15304) 2021-04-30 15:07:57 -07:00
Udeesha Gautam
35e0f1a99f Missed commit from PR #15082 (#15302)
* missed commit from PR #15082 for back up restore name update issue
2021-04-30 13:48:35 -07:00
Alan Ren
5e39b1c7ed fix secrets api issue (#15300) 2021-04-30 13:27:42 -07:00
Alex Ma
4ec3779be4 fix for gulp remote in extensions.ts (#15298) 2021-04-30 13:06:44 -07:00
Aasim Khan
ec4e5d5c18 mssql vbump to 98 (#15297) 2021-04-30 12:01:49 -07:00
Alan Ren
a155b9be40 GA connection browse feature (#15295)
* GA connection browse feature

* UPDATE TEXT
2021-04-30 11:29:37 -07:00
Kim Santiago
25681defd8 Add git clone for project to Open Existing dialog (#15244)
* expose git clone api

* add git clone option for projects in open existing dialog

* add sql carbon edits

* cleanup and error handling

* fix telemetry property

* addressing comments

* add gitignore

* copy git.d.ts from git extension

* remove copy of git.d.ts from data-workspace extension

* use single quotes

* Remove git copy from git clone PR (#15286)

* Remove copy git typings

* Remove gitignore

* update error messages

* lowercase

Co-authored-by: Charles Gagnon <chgagnon@microsoft.com>
2021-04-30 11:12:22 -07:00
Udeesha Gautam
f2ee8b11b4 Fixing the update issue with backup default name (#15082)
* Fixing the update issue with backup default name

* Avoid calling select again since that fires an event

* Fixing comment and backup name
2021-04-30 10:50:26 -07:00
Aditya Bist
adbbe7e6b0 Adjust icon badge size (#15292)
* adjust badge size

* convert rem to px
2021-04-29 18:42:03 -07:00
Alan Ren
c72fdeb019 fix action label vertical alignment issue (#15293) 2021-04-29 18:12:36 -07:00
Alan Ren
2aed4e76e3 error handling for browse azure resources (#15291)
* custom error for throttling

* remove unused imports
2021-04-29 17:06:16 -07:00
Kim Santiago
020bc738cc fix DW project build (#15287) 2021-04-29 14:48:01 -07:00
Aasim Khan
e00d48c210 Adding telemetry framework to migration extension (#15284)
* Adding telemetry to sql-migration

* Adding some telemetry helper functions

* removing unnecessary telemetry

* removing unnecessary imports

* Changing name of telemetry sender methods and removing unused functions.

* Removing extra whitespace

* Fixing a typo
2021-04-29 14:00:44 -07:00
Alan Ren
8d482a06e8 fix smoke test failure (#15276)
* add logging

* fix

* remove logging

* comment out code and add comments
2021-04-29 11:36:42 -07:00
Kim Santiago
cbe128dd6c bump version for schema compare and sql database projects (#15285) 2021-04-29 11:32:13 -07:00
Barbara Valdez
fd0a0ec2b0 replace notebook tab icon (#15282) 2021-04-29 11:31:45 -07:00
Hai Cao
a0bf5df79d Use a different config name for connection sorting (#15275)
* use a different sort config name
2021-04-29 10:52:35 -07:00
Charles Gagnon
ddf092462d Bump arc version (#15283) 2021-04-29 10:07:31 -07:00
Sakshi Sharma
e695a01538 Update Schema Compare dialog to start a new connection (#15193)
* Update SC dialog to start a new connection

* Functionally complete

* Fix target db to pick correct database

* Address comments

* Added test+fixed one missing scenario

* Address comments + add one more test
2021-04-29 09:53:38 -07:00
Alan Ren
e42da81005 connect dialog telemetry (#15267)
* add connection dialog telemetry

* reset source

* add to correct place
2021-04-28 14:44:31 -07:00
Charles Gagnon
659fa2f78d Add back in documentdb dependency (#15265)
* Add back in documentdb dependency

* Use original version
2021-04-28 12:40:02 -07:00
Charles Gagnon
be8a55cef0 Add back open view keybindings (#15264) 2021-04-28 12:17:36 -07:00
Hai Cao
9bc1797e88 add sorting option for saved connections (#15229)
* add sort by name option for saved connections and groups
2021-04-28 11:41:09 -07:00
Karl Burtram
13ab2bc487 Bump SQL Tools Service to 3.0.0-release.97 (#15258) 2021-04-28 11:01:55 -07:00
Charles Gagnon
57cd25fab9 Fix dispose "method not implemented" errors in MainThread API classes (#15257) 2021-04-28 09:39:04 -07:00
Alan Ren
2cf15b51f9 add nowrap to tree label (#15260) 2021-04-28 00:10:48 -07:00
Kim Santiago
da37cf6ec6 Expose sql project apis (#15239)
* make project variables private and add getters

* expose project in sqlproj.d.ts

* add more comments

* change to ISqlProject
2021-04-27 17:35:27 -07:00
Charles Gagnon
f29d7308ff bump distro (#15249) 2021-04-27 16:03:56 -07:00
Karl Burtram
867a963882 Merge from vscode bead496a613e475819f89f08e9e882b841bc1fe8 (#14883)
* Merge from vscode bead496a613e475819f89f08e9e882b841bc1fe8

* Bump distro

* Upgrade GCC to 4.9 due to yarn install errors

* Update build image

* Fix bootstrap base url

* Bump distro

* Fix build errors

* Update source map file

* Disable checkbox for blocking migration issues (#15131)

* disable checkbox for blocking issues

* wip

* disable checkbox fixes

* fix strings

* Remove duplicate tsec command

* Default to off for tab color if settings not present

* re-skip failing tests

* Fix mocha error

* Bump sqlite version & fix notebooks search view

* Turn off esbuild warnings

* Update esbuild log level

* Fix overflowactionbar tests

* Fix ts-ignore in dropdown tests

* cleanup/fixes

* Fix hygiene

* Bundle in entire zone.js module

* Remove extra constructor param

* bump distro for web compile break

* bump distro for web compile break v2

* Undo log level change

* New distro

* Fix integration test scripts

* remove the "no yarn.lock changes" workflow

* fix scripts v2

* Update unit test scripts

* Ensure ads-kerberos2 updates in .vscodeignore

* Try fix unit tests

* Upload crash reports

* remove nogpu

* always upload crashes

* Use bash script

* Consolidate data/ext dir names

* Create in tmp directory

Co-authored-by: chlafreniere <hichise@gmail.com>
Co-authored-by: Christopher Suh <chsuh@microsoft.com>
Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-04-27 14:01:59 -07:00
Lucy Zhang
7e1c0076ba Try to make smoke tests more stable (#15212)
* make sure dialog buttons are not disabled

* use 'enter' to close connect dialog

* retry clicking the Connect button

* wait for dialog gone after reclicking

* pr comments + add logging

* use debug to log

* close toasts before clicking dialog buttons

* await close notification toast call

* click python wizard buttons instead of enter
2021-04-27 13:53:17 -07:00
Vasu Bhog
c66a8ca171 fix parameterized notebook output (#15233) 2021-04-27 12:40:49 -07:00
Aasim Khan
c150f79eb4 Updating handle bars to 4.7.7 to fix a component governance issue (#15246) 2021-04-27 01:34:37 -07:00
Alan Ren
0ed2a5d431 fix result selection summary issues (#15245) 2021-04-26 23:07:55 -07:00
Charles Gagnon
a086ad6ff1 Update what-is link for linux installers (#15242) 2021-04-26 17:16:22 -07:00
Charles Gagnon
ef57812a95 Arc april updates (#15237)
* Enables being able to view and edit Coordinator node scheduling params (#15114)

* Trying to save per role settings

* Updated spec

* Cleaning up

* Removed unnecessary code and comments

* Added separate type for { w?: string, c?: string}, PR fixes

* Added methods to refresh mr,ml,cr,cl versus per role

* Fixed spec

* Put back optional properties, removed passing empty string to reset scheduling params

* Spacing

* vBump arc

* Included roles in fake show output for testing (#15196)

* Update arc specs (#15225)

* Update azdata Arc specs to match April azdata

* vcores -> cpu

* fix spacing

* Consolidate types and update storage volumes

* Fix compile

* Update min azdata version

Co-authored-by: nasc17 <69922333+nasc17@users.noreply.github.com>
Co-authored-by: nasc17 <nasc@microsoft.com>
2021-04-26 16:20:27 -07:00
Alan Ren
1e42f0b923 enable inline actions for connection dialog's browse tab (#15227)
* enable inline actions

* comments
2021-04-26 14:01:00 -07:00
Vaggelis Sotiropoulos
3e4f687862 ISSUE10685 - Edits schemaCompareDialog to show connectionName if provided (#15152)
* ISSUE10685 - Edits schemaCompareDialog to show connectionName if provided

* ISSUE10685 - Edits to not show user if connectionName

* ISSUE10685 - Shows connection name if schema compare launched from db

* ISSUE10685 - Shows connection name in schema compare window

* ISSUE10685 - Edits utils tests

* ISSUE10685 - Edits schemaCompare test

* ISSUE10685 - Adds/Edits specs

* ISSUE10685 - Changes let to const
2021-04-26 11:11:43 -07:00
Aasim Khan
463ea0b46b Fixing the disabled next button on migration wizard (#15226)
* Fixing the next button disabled on wizard

* vbump extension
2021-04-23 17:05:23 -07:00
Alan Ren
222d8e12f4 a couple more table filter improvements (#15219)
* a few more fixes

* revert sortcolumn

* fix hygiene error

* add comment
2021-04-23 15:24:07 -07:00
Christopher Suh
9fabf85fe2 Fix VM Page displaying MI assessment results (#15190)
* fix vm page displaying mi assessment results

* vm page fix
2021-04-23 12:07:17 -07:00
Maddy
f4d47e9675 undo carbon edit (#15216) 2021-04-23 10:26:50 -07:00
Charles Gagnon
1252544282 Fix welcome page enum descriptions (#15215)
* Fix welcome page enum descriptions

* Remove gettingStarted experience

* Add sql carbon edit
2021-04-22 16:35:50 -07:00
Alan Ren
228a5d8e20 Table Filter improvements (#15207)
* use virtualized list

* comments
2021-04-22 12:03:17 -07:00
Aasim Khan
37894c9e96 Migration extensions - UI fixes and vBump (#15199)
* Fixing Migration Cutover Dialog
Adding support for target file share
Fixing request body
Correcting localized strings

* Redesigned IR page
Adding additional details in migration status dialog

* vbump

* Fixed the perpetual loading

* Fixed duration logic

* Adding icon for migration extension

* Adding helper comments to util function
localizing some strings
logging console errors

* enabling cutover buttons for  ignored files
2021-04-22 10:19:36 -07:00
Maddy
fcaaf1cb29 fix the find highlight issues (#15149)
* register mode changes, remove cleanMarkdownLinks

* fixes

* test fixes and scroll to center change
2021-04-22 10:13:30 -07:00
Charles Gagnon
599a64c631 Add encoded path name tests for SQL kernel (#15197)
* Add encoded path name tests for SQL kernel

* Remove invocation count
2021-04-22 09:03:48 -07:00
Karl Burtram
2f02b0bb52 Update drop-down selected value in accessibility mode (#15202) 2021-04-21 12:56:54 -07:00
Vasu Bhog
6951bf3b90 Ability to see and edit links in split view/markdown mode (#15150)
* callout dialog selects link properly in split/markdown mode

* use regex for links
2021-04-20 23:33:14 -07:00
Charles Gagnon
a82be2f014 Fix URI encoding for connection provider calls (#15177)
* Fix URI encoding for connection provider calls

* Update distro
2021-04-20 11:40:32 -07:00
Alan Ren
556dd9d2b1 add padding to editable dropdown (#15192) 2021-04-19 23:46:23 -07:00
Charles Gagnon
e7e4828703 Fix 'nodebookInputFactory' name (#15188) 2021-04-19 15:18:26 -07:00
Charles Gagnon
cd6f584ed1 Notebook smoke test fixes (#15178)
* Notebook smoke test fixes

* Undo delay fixes
2021-04-19 14:26:47 -07:00
Charles Gagnon
2f04adb510 Fix crash when opening SQL diff input (#15183)
* Fix crash when opening SQL diff input

* fix compile

* Fix tests
2021-04-19 13:49:02 -07:00
Alan Ren
1914546553 fix filter not applied issue (#15186) 2021-04-19 13:46:17 -07:00
Hai Cao
e3ef4e1755 Onboarding pull request (caohai) (#15184) 2021-04-19 12:52:17 -07:00
Christopher Suh
dbcada5e30 Display "No Issues" Message (#15176)
* wip

* no warnings screen for dbs with no warnings

* cleanup
2021-04-19 10:01:46 -07:00
Christopher Suh
fe9d3bc63b Disable checkbox for blocking migration issues (#15131)
* disable checkbox for blocking issues

* wip

* disable checkbox fixes

* fix strings
2021-04-17 09:03:46 -07:00
Lucy Zhang
aa315ae19f Update azuredatastudio-sqlite version (#15174)
* update ads-sqlite version

* update distro value
2021-04-16 13:49:47 -07:00
Charles Gagnon
ca701a7985 Remove copycat action (#15172) 2021-04-16 12:45:22 -07:00
Maddy
6d95b113d5 fix the logic to check the same level folders (#15143) 2021-04-16 11:35:51 -07:00
Aditya Bist
5244cd0b09 update version (#15161) 2021-04-16 10:46:40 -07:00
Charles Gagnon
2f865445a7 Disable workspace tags event (#15168) 2021-04-16 10:33:45 -07:00
Karl Burtram
4c5bb90724 Bump STS to 3.0.0-release.93 (#15163) 2021-04-16 10:31:00 -07:00
Charles Gagnon
dcd952b903 Fix error when disconnecting database (#15146)
* Fix error when disconnecting database

* correctly print error
2021-04-16 09:24:22 -07:00
Aditya Bist
f60d0fa9e5 release notes (#15160)
* release notes

* fix typo
2021-04-16 09:08:34 -07:00
Aasim Khan
47584544e2 Fixing migration extension to reflect latest RP changes (#15157)
* Fixing sms object

* extension vbump
2021-04-16 05:20:07 -07:00
Alan Ren
0a707c2b96 remove the preview flag check for tabs (#15155) 2021-04-15 16:11:50 -07:00
Maddy
bec1916c62 show notebook as trusted when it belongs to a trusted book (#15093)
* check for trusted books

* don't add to trustedNotebooksMomento

* update comment

* added comments

* remove empty line
2021-04-15 10:30:51 -07:00
Alan Ren
4edbd6f3e7 fix the menu not hide issue (#15138) 2021-04-14 16:18:45 -07:00
Vasu Bhog
3a1885491f Update run w/parameters icon and increment parameterized notebook (#15127)
* update icon and increment parameterized notebook
2021-04-14 15:28:27 -07:00
Charles Gagnon
e7571410e0 Add notebook query event listeners before connect (#15136) 2021-04-14 15:09:18 -07:00
Alan Ren
9066ea153f trap keyboard navigation (#15134) 2021-04-14 14:09:41 -07:00
Aditya Bist
e151668c81 Links handling in commands (#15118)
* format

* update external options type

* add command flag for command

* Allow commands in Notebooks

Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-04-14 13:06:45 -07:00
Alan Ren
842a33e318 a couple more fixes for table filter (#15121)
* a few fixes

* add comments and fix for non-ideal scenario
2021-04-14 10:20:29 -07:00
Charles Gagnon
5922047f6b Add Import smoke test (#15111)
* Add smoke test

* Add wait for service

* Also revert liveshare

* fixes

* Use custom extensions dir

* fix

* compile extensions

* Use build extensions dir

* test break

* revert

* revert yarn.lock

* Add comment

* Add comments
2021-04-14 08:43:45 -07:00
Aasim Khan
5e8ac017a6 Adding on change event in editable dropdowns when value is changed through code. (#14952)
* Adding onChange events in editable dropdowns for values changed through code.

* moved event generation from angular wrapper to core component

* Moving event firing to value setter

* converting back updateSelection to private method

* Removing extra check

* Fixing broken tests by mocking used stuff

* Fixing onTextChange event

* Adding some tests

* Adding necessary tests

* small test name edit

* Fix an assert message
2021-04-14 07:48:33 -07:00
Aditya Bist
b6bdb68596 add flag for proxy (#15120)
* add flag for proxy

* update distro hash

* Bump distro hash

* Bump distro

Co-authored-by: kburtram <karlb@microsoft.com>
Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-04-14 00:36:39 -07:00
Aditya Bist
1b78008258 Remote CLI fixes (#15117)
* fixes from vscode

* update distro

* fix distro

* fix hygiene

* reorder hygiene

* Update distro

Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-04-13 20:42:06 -07:00
Aditya Bist
90868bc0ad fixed css for font family (#15119) 2021-04-13 18:37:55 -07:00
Vasu Bhog
58d0add46f Save Kusto Notebook with proper language notebook info (#15113)
* save kusto language info
2021-04-13 17:28:33 -07:00
Vasu Bhog
9f63653a53 Fixes notebook integration tests + running parameterized notebook (#15107)
* fix interface

* fix run all cells on new parameterized notebook

* add getNotebookUri function
2021-04-13 17:23:15 -07:00
Kim Santiago
b7ea1c1bf3 Don't allow duplicate file/folder entries to be added to sql projects (#15104)
* don't allow adding the same file or folder to the sqlproj if it has already been added

* add a couple more checks in test

* toLowerCase when comparing
2021-04-13 16:08:43 -07:00
Charles Gagnon
0ce57eb9b3 Update smoke test repo (#15101) 2021-04-13 13:54:41 -07:00
Charles Gagnon
339180aef3 Remove duplicate artifact upload (#15115) 2021-04-13 13:54:16 -07:00
Alan Ren
8d098c227d use context view provider to show the filter (#15106)
* use context view provider

* handle promise

* replace .then with .catch
2021-04-13 11:34:26 -07:00
Justin M
836da58c2c Bumped version of Kusto to 0.5.3 for SqlToolService 92 (#15108) 2021-04-13 11:24:59 -07:00
Charles Gagnon
34c8c77dc1 Fix MIAA connection strings page showing user as undefined (#15105) 2021-04-12 18:15:56 -07:00
Charles Gagnon
0c241c6d65 Declarative table component fixes (#15085)
* Declarative table component fixes

* Don't add empty items list
2021-04-12 13:55:19 -07:00
Charles Gagnon
42cd650147 Only reveal parent node when finding notebook item (#15091) 2021-04-12 13:12:04 -07:00
Alan Ren
862ffcf7d2 fix search box issue (#15097)
* fix search box issue

* fix compilation error
2021-04-12 13:05:19 -07:00
Charles Gagnon
ba89351ff4 Revert update to sqlops-dataprotocolclient (#15088) 2021-04-11 19:57:36 -07:00
Lucy Zhang
24a425ac64 temp skip failing nb integration tests (#15087) 2021-04-09 15:57:45 -07:00
Sakshi Sharma
c58b7d35d7 Fix deploy data to be presented on dashboard (#15073)
* Fix deploy data to be presented on dashboard

* Bump sql-db-project extensions version

* Address comment
2021-04-09 12:22:31 -07:00
Chris LaFreniere
e5a500d1a5 Change to fspath from resource tostring (#15069) 2021-04-09 12:00:55 -07:00
Charles Gagnon
8ea1d9fd8a Add "Don't Ask Again' response to required azdata update prompt (#15058)
* Add don't show again response to azdata required update

* fix key

* add config

* Update error message
2021-04-09 09:54:36 -07:00
Aasim Khan
a11beb71da Updating build pipelines to Cache@2 and switching to artifacts for compiled file sharing in pipeline jobs. (#14989)
* switching product compile node cache task to newer version

* moving new changes from product-compile to sql-product-compile

* changing to yarn.lock as cache key

* Adding compilation cache

* changing keypath to key

* letting find command do the heavy lifting

* removing old save cache task

* reverting compilation cache to old task

* Creating a js to list compiled files
switching to cache 2 for compiled files
Creating a js file to compute yarn cache

* removed unused input targetFolder from pipeline cache task

* removed save cache

* Fixing compute node modules file

* Adding compiled computenodemodules

* Fixing checked variables on product compile
Updating all pipeline jobs to cache 2
Using tar for windows pipeline. Hoping it works

* Fixing indentation in web job

* Fixing different indentation in web job

* Generating sha keys for compilation cache to be cross plat

* trying deterministic key for compilation cache

* Fixing md5 command

* Trying another method of generating compilation cache

* testing with a hardcoded string

* Changing to a better hardcoded string

* Remove redundant make dir

* Fixing mkdir command in windows and trying new string key

* fixing $$ in sql product compile

* Removing redundant mkdir

* Trying source version var

* Fixing compilation key

* changing script to PowerShell

* Adding artifacts to store compiled files
switching to 7zip for windows node cache

* Adding missing step key in web build

* Building not found directories

* Making correct directory

* Switching to vscode's computeNodeModuleCache

* Fixing formatting and making it look more like vscode's pipeline

* Adding back compiled compute cache key

* Fixing cache file

* Fixing copyright message
Adding sql header to custom node cache generator
Updating cache salt to force a cache miss

* Using glob instead of custom method to find all yarn.lock files
Fixing some other pipeline errors.

* Removing unnecessary variable checks.

* Added back VSCODE_STEP_ON_IT check
Moving drop artifacts before compiled files to keep the drop folder free from compiled files

* Changing task name from cache flags to cache key

* Removing glob from compute node module cache
Fixing copyright message

* checking in updated js
2021-04-09 08:06:39 -07:00
Barbara Valdez
f712c14b93 fix search loading animation (#15063) 2021-04-08 15:29:40 -07:00
Maddy
7484b04158 Select active notebook in viewlet when opened from command palette (#15027)
* await initialized

* fixes specific for windows

* address comments

* update comment
2021-04-08 15:27:33 -07:00
Vasu Bhog
d76a6698a9 [Notebook] Run Parameters Action openNotebook Functionality in Core (#14978)
* NotebookService update

* openNotebook functionality in NbService

* Add tests for RunParametersAction
2021-04-08 14:48:37 -07:00
Barbara Valdez
4f67f32262 fix anchor links in wysiwyg (#14950)
* fix anchor links in wysiwyg
2021-04-08 10:56:11 -07:00
Udeesha Gautam
35e1daa6ba Fixing an issue where backup launch was freezing ADS and restore launch was not doing anything. This repros in the absence of any connections. (#15041) 2021-04-07 18:37:05 -07:00
Udeesha Gautam
c20e620c34 Fix for Bug #15020 dashboard update with multiproject (#15029)
* Fix to enable two projects to show only their own data even when opened together

* Fixing a typo

* Taking in PR comments
2021-04-07 16:44:25 -07:00
Alan Ren
6a55c402a4 declarative table improvement (#15024) 2021-04-07 15:27:47 -07:00
Barbara Valdez
a7a6918b1a bump sqlservernotebook version (#15025) 2021-04-07 14:39:01 -07:00
Charles Gagnon
1c66499910 Fix event property name (#15026) 2021-04-07 13:37:17 -07:00
Christopher Suh
95f8c3c8b0 ADS Migration UI Fixes (#15011)
* wip database count

* wip

* added warnings count

* changed warnings to issue details when DB selected

* updated selected dbs

* issues -> issue

* fix issues/warnings
2021-04-07 11:07:15 -07:00
Alan Ren
7fc991b678 fix chart's checkbox option type (#15014)
* fix chart's checkbox option type

* overwrite the useragent margin for checkbox
2021-04-07 10:19:00 -07:00
Sakshi Sharma
addef2d584 Add message when no history exists on projects dashboard (#15002)
* Add message when no history exists on projects dashboard

* Bump version for sql db projects

* Update text, add refresh button

* Remove commented code
2021-04-07 00:42:11 -07:00
Charles Gagnon
9035914d5d Add required minimum version for azdata extension (#15010)
* Add check for minimum required azdata version

* cleanup

* remove unused

* comment

* param comment

* Fix tests
2021-04-06 17:53:46 -07:00
Kim Santiago
eb8c1c396d fix schema compare dropdown not selecting correct db when multiple active connections (#14999)
* fix schema compare dropdown not selecting correct db when multiple active connections

* fix when no username so default is used
2021-04-06 16:15:31 -07:00
Vasu Bhog
04af3e161a See and Edit Selected Links in Callout Dialog (#14987)
* Add URL label to linkCallout

* add test for file link
2021-04-06 11:51:53 -07:00
Justin M
aba8116648 Increased ADS version required from 1.27 to 1.28 (#14991) 2021-04-06 10:57:32 -07:00
Charles Gagnon
10923ee544 Update minimum required azdata engine version - Arc (#14996)
* vBump Arc/Azdata

* undo vbump
2021-04-06 10:04:18 -07:00
Charles Gagnon
4a4f889e8e Update minimum required azdata version to latest release (#14995) 2021-04-06 09:58:24 -07:00
Maddy
43db30d1da Add more Notebook telemetry events (#14755)
* initial checkin

* telemetry for search in notebooks

* telemetry for move notebook

* address comments

* feedback changes

* fix tests passing NullAdsTelemetryService

* move changeKernel telemetry higher up

* remove telemetry service

* Fix new Notebook events (#14982)

* Notebook telemetry fixes

* update2

* Move event location

* remove service

* remove unused

Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-04-05 16:08:39 -07:00
Alan Ren
d8b2df5dbc check whether element is undefined (#14981) 2021-04-05 15:35:39 -07:00
Vasu Bhog
0a7719b475 [Notebook] Run Parameters Action UI Component (#14889)
* Run Parameters Action UI Component

* Update UX discussion - accept empty string
2021-04-05 13:59:27 -07:00
Charles Gagnon
75c1a6c2cd Sign vulkan-1 DLL (#14976)
* Sign DLLs

* Only sign vulkan-1
2021-04-05 13:57:20 -07:00
Alan Ren
1a1bcf89c9 redraw table header (#14963) (#14972) 2021-04-05 13:28:51 -07:00
Charles Gagnon
febd8b29c9 Arc updates for March release (#14970)
* Updated Postgres Spec for where to find engine version, removed calling -ev in edit commands (#14735)

* Added spec.engine.version, took out calling engine version with edit calls

* Added text wrong place

* missed updates

* PR fix

* Update Arc Postgres troubleshooting notebook

Co-authored-by: Brian Bergeron <brberger@microsoft.com>

* Remove AzdataSession from azdata commands (#14856)

* remove session

* Add in controller-context support

* Revert "Add in controller-context support"

This reverts commit 3b39b968efbf6054041cb01cb2d8443532643a82.

* Add azdataContext to login

* Undo book change

* Undo change correctly

* Add controller context support (#14862)

* remove session

* Add in controller-context support

* Add params to fake

* Fix tests

* Add info and placeholder for controller URL/name (#14887)

* Add info and placeholder for controller URL

* add period + update name

* update memento and allow editing of namespace/URL

* vBump

* vBump

* Fix tests

Co-authored-by: nasc17 <69922333+nasc17@users.noreply.github.com>
Co-authored-by: Brian Bergeron <brian.e.bergeron@gmail.com>
Co-authored-by: Brian Bergeron <brberger@microsoft.com>
2021-04-05 11:47:36 -07:00
Charles Gagnon
71b91c3890 Remove survey link (#14968) 2021-04-05 10:18:22 -07:00
Alan Ren
1dad2f36e3 redraw table header (#14963) 2021-04-05 10:11:53 -07:00
Alan Ren
6b855a9776 fix checkbox and radio button click issue (#14965) 2021-04-05 10:11:39 -07:00
Christopher Suh
4dc3999f46 Sort Dbs by number of issues (#14961)
* wip

* sorted dbs by number of issues
2021-04-05 08:46:45 -07:00
Alan Ren
c2d2cf5a82 limit the data size for chart rendering (#14949)
* limit the rows feed to charts

* add telemetry and option to hide

* fix typo

* updates

* comments

* notebook fix
2021-04-02 19:36:10 -07:00
Maddy
13ad4c9497 Select notebook in viewlet when opened in editor (#14854)
* test changes

* code clean up

* remove extra line

* remove commented code

* remove unnecessary import

* update expand logic

* implement parent,children on bookitem

* merge conflicts

* refactor code

* fix trustBook test

* typo on condition

* feedback

* indexOf to include

* fix undefined error on expand

* remove set parent

* revert getTreeItem logic

* Fix duplicated nodes

* remove debug

* Clean up logic

* Fix tests

* Fix logic

* cleanup

* Cleanup findAndExpandParentNode (#14960)

Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-04-02 19:33:41 -07:00
Aasim Khan
684dfc9760 Surfacing migration errors in dashboard (#14956)
* vbumping migration

* Adding 2 new icons cancel and warning

* Fixed help link display text in assessments

* Adding summary page redesign and resource name validations

* Made headings bold

* Fixed sku recommendation page styling
Added check item for assessment

* Validating account dropdown after token refresh

* Renamed cutover to mode

* cutover to mode renaming changes.

* Converting to details api for more warnings

* Added target database name and fixed cancel icon

* Surfacing warning info in dashboard.

* Consolidated fetch migrations logic
Localized some strings
Surface migration errors in dashboard and status page
Table redesign in status dialog
Fixed a major bug that happens when multiple dashboards are opened due to class variable sharing

* removing console count

* Fixing regex for SQL MI database names

* Allowing spaces in regex
2021-04-02 18:49:34 -07:00
Barbara Valdez
fde5caa9a4 Add documentation and fix paths (#14948)
* use posix relative path

* add doc link in dialog

* rename book to Jupyter book
2021-04-02 15:44:25 -07:00
Chris LaFreniere
0d3112ef35 Adding Rendered Notebook Diff Option (#14860)
* First attempt nb diff preview

* First attempt nb diff preview

* Simplify everything

* Interim scroll one way

* Double scroll

* Add setting

* Add tests

* Add comment

* Fix tests

* first round PR comments

* Ensure scrollable portion has scrollbar in diff

* Fix sqllint errors, register events

* Fix scrolling, readonly
2021-04-02 14:49:52 -07:00
Charles Gagnon
e7be1daf5c Switch azuredatastudio-sqlite repo (#14940)
* Switch azuredatastudio-sqlite repo

* Bump version
2021-04-02 09:14:50 -07:00
Sakshi Sharma
30a2b76faf Add background image to sql db projects dashboard (#14942)
* Add background image to sql db projects dashboard

* Removed gradient color

* Added bottom border for header

* Fixed border color
2021-04-01 17:24:16 -07:00
Charles Gagnon
ce6ea8af41 Fix dropdown error & editor database dropdown validation (#14946)
* Fix dropdown error & editor database dropdown validation

* Set initial values

* Update comment

* hygiene

* remove unused

* Fix tests
2021-04-01 14:52:55 -07:00
Barbara Valdez
a00ffa0469 fix search action (#14937)
* fix search action

* remove book version from book tree item
2021-04-01 11:16:32 -07:00
Charles Gagnon
37af88e265 Fix some spelling errors (#14944) 2021-04-01 11:16:08 -07:00
Charles Gagnon
4c2969d4ca Don't log race condition error (#14922) 2021-04-01 11:11:43 -07:00
Charles Gagnon
97ce7b9b67 Use shared packages in markdown-language-features dependencies (#14936)
* Use shared webpack packages in markdown-language-features dependencies

* update yarn post merge

* Remove ts-loader too
2021-04-01 09:44:41 -07:00
Alan Ren
ef8396f783 fix query plan doc failed to load issue (#14943)
* fix query plan doc failed to load issue

* organize imports
2021-04-01 09:41:41 -07:00
Barbara Valdez
fc13fb2200 Book Fixes (#14931)
* change folder icon and fix book path

* change folder icon in dark mode

* add folder.svg under resources
2021-03-31 14:02:08 -07:00
nasc17
2164bc0bb9 Unit test for Postgres Connections Strings Page (#14905)
* Start writing test (2)

* How read property pairs

* Connection strings check

* Delete folder

* PR fixes
2021-03-31 13:21:44 -07:00
Charles Gagnon
5151f337bc Log full stack trace for editor opened errors (#14932)
* Log full stack trace for editor opened errors

* space

* Actually print name
2021-03-31 13:13:41 -07:00
nasc17
6ab9844909 Cleaning up PG Compute and Storage code and adding reset for Cores (#14919)
* Add reset ability to cores request and limit

* Remove not needed !

* Changed withProperties to with Props
2021-03-31 11:54:53 -07:00
dependabot[bot]
d6e0a679f7 Bump y18n from 4.0.0 to 4.0.1 in /build (#14902)
Bumps [y18n](https://github.com/yargs/y18n) from 4.0.0 to 4.0.1.
- [Release notes](https://github.com/yargs/y18n/releases)
- [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yargs/y18n/commits)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-03-31 11:29:19 -07:00
dependabot[bot]
eb5405d116 Bump y18n from 4.0.0 to 4.0.1 in /extensions/markdown-language-features (#14910)
Bumps [y18n](https://github.com/yargs/y18n) from 4.0.0 to 4.0.1.
- [Release notes](https://github.com/yargs/y18n/releases)
- [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yargs/y18n/commits)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-03-31 11:27:43 -07:00
Kim Santiago
0fa9689345 add data-workspace unit tests to test-extensions-unit scripts (#14923) 2021-03-31 11:19:03 -07:00
Justin M
1ce10cd3f3 Increased sqltoolservice version for Kusto to 90 and bumped Kusto version to 0.5.2 (#14927) 2021-03-31 11:15:49 -07:00
Alex Ma
f7b9ca775a Update for language packs, (#14912)
* update DE lang pack extension strings from previous refresh

* bump lang pack versions for DE

* update for spanish langpack

* update for french langpack

* update for italian pack

* update for japanese pack

* update for korean pack

* update for Brazilian Portuguese pack

* update for Russian pack

* Update for Simplified Chinese pack

* update for Traditional Chinese pack

* added updated language packs for german and spanish

* changed vscode version

* added french and italian packs

* changes made to main

* japanese language pack added

* added korean pack

* Added Portuguese pack

* added russian pack, also small change to pt pack

* simplified chinese pack

* added traditional chinese pack

* deleted old language packs

* restore sql.i18n.json files

* Main.i18n.json reverted

* Revert sql json files

* main.i18n.json file for German updated

* updates to language packs 1

* main json for italian updated.

* update to japanese main

* finished translations for vs and sql in main.

* added ADS exclusive extensions to package.json

* fixed markdown and seti translation ids

* German language pack fixed

* Removed make from ads-language-pack-de and fixed es language pack

* french language pack fixed

* fixed italian language pack and restored typescript-basics

* removed unnecessary readme strings and extensions.

* fixed japanese pack

* Korean language pack fixed

* Portuguese language pack fixed

* fixed russian language pack

* Simplified Chinese pack fixed

* traditional chinese fixed

* restored sqlservernotebook
2021-03-31 11:08:45 -07:00
Aasim Khan
e762f19815 Fixing migration private preview bugs WIP 3 (#14928)
* Adding server name to wizard and dialog title
Surfacing async errors
Fixing a bunch of strings to match the mockup

* Adding auto refresh for migration status

* Removing errors for sql vm migration

* using new logic to get sql server username

* Fixing help links

* Removing unnecessary await
2021-03-31 10:19:59 -07:00
Enrique Mejia
00d2fadb7d Use {arg} to reference selected text in query shortcuts (#14894)
* Add selection parameter to query shortcuts

* Add description for selected text as parameter

* Modify feature description since it supports procedures and queries

* Replace every {arg} inside query shortcut
2021-03-31 08:53:52 -07:00
Christopher Suh
cb619f55c7 mi page shows prompt to select DB to see any warnings (#14921) 2021-03-30 23:19:12 -07:00
Christopher Suh
28577baa87 Assessment Progress Page (#14909)
* wip assessment in progress page

* wip

* working assessment loader

* wip, radio cards not showing up

* working assessments in progress page

* cleanup

* bumped sqlmigration extension version

* cleanup
2021-03-30 23:18:54 -07:00
Barbara Valdez
8aa222d1c8 Add new file UI (#14773)
Adds a notebook or markdown file to a book's top level or a section by right-clicking in the Book Tree View
2021-03-30 18:44:52 -07:00
Sakshi Sharma
b774f09b6c Sql DB project dashboard (#14899)
* First set of changes for workspace dashboard implementing the toolbar

* Workspace dashboard container implementation (#14813)

* First set of changes for workspace dashboard implementing the toolbar (#14160)

* First set of changes for workspace dashboard implementing the toolbar

* Addressed comments

* Addressed one remaining comment

* Removed an extra comma in interfaces file

* Addressed comments

* Addressed comments

* Refactored a bit of code

* Remove unnecessary await

* Addressed comments

* First set of changes for workspace dashboard container

* Update targetPlatform icon+add Time column to deploy table

* Addressed comments

* Removed redundant class definition

* Addressed comments

* Addressed comments

* Change enum to union type in dataworkspace typings

* Fix tests

* Addressed comments
2021-03-30 17:37:53 -07:00
Charles Gagnon
4df77c73bf Fix slider component (#14918) 2021-03-30 17:33:59 -07:00
Kim Santiago
f4e1f85e0f Add dacpac references to sqlproj with relative path (#14877)
* relative paths written to sqlproj, but can't delete yet

* only keep track of relative path

* remove leading slash

* add test

* use path.relative

* Add error message if dacpac reference is located on a different drive
2021-03-30 17:06:04 -07:00
Kim Santiago
af4ad1fcb1 fix revealFileInOS command (#14913) 2021-03-30 15:11:48 -07:00
dependabot[bot]
d3f9277139 Bump y18n from 3.2.1 to 3.2.2 in /samples/sqlservices (#14911)
Bumps [y18n](https://github.com/yargs/y18n) from 3.2.1 to 3.2.2.
- [Release notes](https://github.com/yargs/y18n/releases)
- [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yargs/y18n/commits)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-03-30 14:51:09 -07:00
Justin M
7dfafe1393 dstsAuth Token Refresh (#14890)
* Refactored getToken to set tenantId based on providerId

* Changed logic to set tenantId when provider is not dstsAuth
2021-03-30 10:30:25 -07:00
Aasim Khan
a13e924a14 Fixing filters and removing not started migration tile (#14903) 2021-03-30 00:41:04 -07:00
Aasim Khan
a231d2aa82 Migration Private preview 1 fixes 2 (#14898)
* Removing canary host

* Rebranding extension name to Azure SQL Migration

* stopping instance table overflow in assessment dialog

* Added info message for details copied

* Limiting storage account and DMS to the same subscription as target

* making accounts page look like figma mockups

* converting error messages to warnings in cutover dialog

* making source config page look like figma mockups

* adding more filters for storage account and dms

* Adding validations for target database names.

* Fixing branding in other strings

* Adding types for SQL Managed Instance
2021-03-29 17:43:19 -07:00
Sakshi Sharma
69361b5c97 Update button texts for Create New project and Open existing project UIs (#14900) 2021-03-29 16:45:08 -07:00
Christopher Suh
896d0b0ea1 bump sts version (#14895) 2021-03-29 12:49:21 -07:00
Sakshi Sharma
bdbe4fb6de Fix telemetry for Sql db projects (#14896) 2021-03-29 12:20:56 -07:00
Alan Ren
4c5a4215f8 fix maximum call stack size reached issue (#14878)
* fix maximum call stack size reached issue

* Revert "fix maximum call stack size reached issue"

This reverts commit 178675633032a508ddb5585d1adc4f83bb243f55.

* add a few array operations

* use new push
2021-03-29 12:07:23 -07:00
Lucy Zhang
dd5adad772 Notebooks: fix save as csv/excel/json/xml (#14882)
* add getEncoding method to notebooks

* fix row 501 missing in serialization

* fix row index

* pr comment
2021-03-29 07:14:49 -07:00
Kim Santiago
ca19f08582 Add git clone option for opening existing workspace (#14828)
* added git option to dialog

* Add validation for cloning workspace and hide radio buttons for project

* add test

* cleanup
2021-03-26 15:30:29 -07:00
Lucy Zhang
bbee4a1f38 only restart server if there is an active session (#14885) 2021-03-26 15:04:08 -07:00
Kim Santiago
39a47b0053 Add telemetry for opening dacpac wizard (#14884)
* add telemetry for opening dacpac wizard

* add wizard open telemetry to core

* fix tests

* remove WizardOpen
2021-03-26 14:57:51 -07:00
nasc17
e080770c19 Updates how the Postgres Parameter Dashboard adds data to its table and discard values. (#14840)
* Clear updates in discard

* Update discarding values

* Changed where the info bubble is created, added function for collecting all changed components.

* Remove parameters if the same as original value; reset individually

* Moved refreshParametersTable to be private function, try catch for postgresmodel.refresh

* Discard enable if fail

* Made changedComponentValues a set, removed unnecessary ! throughout file, cleaned up connect and load
2021-03-26 13:59:33 -07:00
Aasim Khan
4d78aefe57 Fixing bugs for migration extension private preview 1. (#14872)
* Fixing Database backup page target layout

* Filtering out Azure sql db issues from assessment results
Correcting the database count for issued databases in sku rec page.

* Adding copy migration details button to migration status

* Adding start migration button to toolbar

* Fixing a syntax error in package.json

* Adding rg and location to target selection page
Filtering storage account by target location.

* Fixing dashboard title to azure sql migration

* Not making assessment targets selected by default.

* Adding tooltip for database and instance table items.

* Fixing duplicate task widget

* Some fixes mentioned in the PR
Localizing button text
renaming  a var
changing null to undefined.

* Adding enum for Migration target types

* Fixing a critical multi db migration bug because of unhandled race condition

* Adding Azure location api to azure core

* Adding source database info in status
2021-03-26 10:32:28 -07:00
nasc17
e0f24cc268 Add try catch block to model refresh in compute/storage pages (#14861)
* Add try catch block to model refresh in compute/storage pages

* spacing
2021-03-26 08:30:42 -07:00
Charles Gagnon
4b5aac57d3 Add return type to handler (#14876) 2021-03-25 17:12:58 -07:00
Charles Gagnon
5317d9ae0b Reload resource deployment types on extension changes (#14875)
* Reload resource deployment types on extension changes

* add comments
2021-03-25 16:41:36 -07:00
Charles Gagnon
b7e982e78a Add prompt to install required extensions for resource deployment (#14870) 2021-03-25 16:41:08 -07:00
Barbara Valdez
15f7b12849 Search smoke tests (#13469)
* Add smoke test

* add simple tests for searching in notebooks view

* address pr comments

* address pr comments

* add search smoke tests to separate file

* remove span from selector
2021-03-25 12:43:55 -07:00
Alan Ren
94c0795fc7 add 'start migration' action to dashboard toolbar (#14869)
* add 'start migration' action to dashboard toolbar

* localization
2021-03-25 12:36:37 -07:00
Alan Ren
5db6857c49 only return visible elements as focusable (#14864) 2021-03-25 09:49:16 -07:00
csigs
edac96a624 LEGO: check in for main to temporary branch. (#14858) 2021-03-25 09:33:17 -07:00
Aasim Khan
f61288d29e Fixing assessment UX for private preview (#14837)
* Fixing assessment UX for private preview

* localizing all the string

* Fixing comments

* Fixing dbname bug in assessment page.
2021-03-24 20:49:03 -07:00
Alan Ren
f12c8cd5d3 fix issue that user is not able to interact with checkbox in declarative table using keyboard (#14863)
* handle space key press

* correct fix
2021-03-24 19:07:12 -07:00
Alan Ren
2b7535f377 fire selection event when re-entered (#14857) 2021-03-24 16:40:20 -07:00
Lucy Zhang
f59e9b5695 Notebooks: Use new Python installation after configuration change (#14765)
* start new jupyter server

* restart session working (removed extra code)

* only restart server once

* shutdown session first then stop server

* add comments remove extra lines

* add comment

* fix test

* only restart jupyter sessions

* Dispose jupytersessionmanager and create new one

* move restart server logic out of notebookmodel

* move methods to azdata proposed

* pr comment
2021-03-24 15:31:20 -07:00
Leila Lali
130a439abc Removed 'preview' from MI (#14767) 2021-03-24 14:33:27 -07:00
Kim Santiago
dee5720676 make task history horizontally scrollable (#14846) 2021-03-24 13:41:02 -07:00
Alan Ren
92e9a423a0 declarative table fix (#14844)
* declarative table fixes

* reset selectedRow

* use eventHelper
2021-03-24 10:45:43 -07:00
csigs
0fb01b5b34 LEGO: check in for main to temporary branch. (#14852) 2021-03-24 10:08:22 -07:00
Alan Ren
b148b91a9c fix the layout issue (#14850) 2021-03-24 10:03:21 -07:00
csigs
689144a526 LEGO: check in for main to temporary branch. (#14842) 2021-03-24 09:37:28 -07:00
csigs
a3681dc0f9 LEGO: check in for main to temporary branch. (#14836) 2021-03-24 09:37:07 -07:00
csigs
4f255647ac LEGO: check in for main to temporary branch. (#14848) 2021-03-24 09:36:46 -07:00
Aasim Khan
e6a81d01cc Making source config more intiutive (#14849)
Adding loading component for assessment card
2021-03-24 00:49:33 -07:00
Alan Ren
7d94e247f6 card link styles (#14847) 2021-03-23 23:08:49 -07:00
Aasim Khan
de494f97ef bumping sts to get latest migration assessment api changes (#14843) 2021-03-23 21:06:25 -07:00
Christopher Suh
be9d18428f Deselect row every time setProperties called (#14838)
* deselect row every time setProperties called

* added data

* changed clear to only happen if data property changed
2021-03-23 15:40:21 -07:00
Alan Ren
6c54059f89 query results filtering and sorting (#14833)
* query results filtering and sorting (#14589)

* enable filter

* attach button style

* add hybrid data provider

* make filter and sort work

* fix editor switch issue

* configuration

* fix sort and filter

* add more specific selector

* fix hidden items issue

* update text

* revert text change

* fix copy results issue

* put feature behind preview flag

* comments

* fix tslint error
2021-03-23 11:30:41 -07:00
Barbara Valdez
5c67f3dbed fix tests on windows (#14827)
* fix tests on windows

* fix test
2021-03-23 10:03:48 -07:00
csigs
e2b6972ba9 LEGO: check in for main to temporary branch. (#14834) 2021-03-23 09:10:43 -07:00
csigs
f125e014fa LEGO: check in for main to temporary branch. (#14831) 2021-03-23 09:10:19 -07:00
csigs
8d0131b624 LEGO: check in for main to temporary branch. (#14826) 2021-03-23 09:09:54 -07:00
csigs
2d43cca3c8 LEGO: check in for main to temporary branch. (#14820) 2021-03-23 09:09:31 -07:00
csigs
0d951e1ef1 LEGO: check in for main to temporary branch. (#14815) 2021-03-23 09:09:19 -07:00
Aasim Khan
339d908d1d Adding windows auth support to sql-migration and misc bug fixes. (#14816)
* - Added coming soon message for learn more.
- Potential fix for learn more message

* Renaming of controller to sqlMigrationService

* Surfacing some errors
-Azure account is stale error
-Migration Service creation error.

* Adding refresh azure token validation.

* Fixing some errors pointed out during PR
-Fixing property names
-Fixing count

* Fixing migration status
- Adding special error handling for resource not found error
- Deleting unfound migrations from local cache
- Using prefetched migration status for view all

Misc fixes:
- Using SQL server version name instead of number
- Fixing Icons on sku recommendation page
- Fixing table column width in cutover dialog
- Adding spinner button to refresh.

* Fixing all strings in migration service page and dialog

* fixed a string error in create service dialog

* Adding source config page to migration to support windows auth
Some refactorings for sqlDatabaseTree (still WIP)

* refactoring assessments code 1
introducing new interface for server assessments

* Filtering out non windows sql vms
Retaining selections made by user on assessments dialog

* Fix compile errors on sqlDatabaseTree

* Exposing migration status errors in cutover dialog

* Updating extension verion

* Correcting typos
Fixing compilation errors
Removing en-us from url
Fixing function names
Make UI calls unblocking

* Unblocking dialog in case of failed assessments
Localizing string
removing blocking code from UI
Fixing comments

* Fixed broken assessment page logic
2021-03-23 07:48:26 -07:00
Lucy Zhang
780ca84f9a Enable status bar for notebooks (#14817)
* enable status bar for notebooks

* add onCellExecutionStart event to notebook service

* fix test, change var name
2021-03-22 14:54:13 -07:00
Charles Gagnon
8fb54710fb Promote CssStyles to stable API (#14824)
* Promote CssStyles to stable API

* add comment

* Fix compile
2021-03-22 12:16:59 -07:00
Christopher Suh
1a74d0f3d4 bumped sqltoolsservice version (#14823) 2021-03-22 11:33:11 -07:00
Charles Gagnon
72295d46c2 Add close method to ModelView dashboards (#14812)
* Add close method to ModelView dashboards

* fix closing

* remove accessors

* Update errors
2021-03-22 10:17:16 -07:00
csigs
98ba49304e LEGO: check in for main to temporary branch. (#14802)
Co-authored-by: Alex Ma <alma1@microsoft.com>
2021-03-21 13:03:48 -07:00
Charles Gagnon
6ee321a719 Update sqlops-dataprotocolclient to 1.2.2 (#14807)
* Update sqlops-dataprotocolclient to 1.2.2

* Fix yarn.lock
2021-03-20 12:49:18 -07:00
Alan Ren
45fd89d6da radio button outline style (#14811) 2021-03-19 20:06:57 -07:00
Alan Ren
128d382c91 use label instead of handle for aria label (#14810) 2021-03-19 18:05:10 -07:00
nasc17
2086ca6039 Postgres Parameters edit calls missing try catch block (#14805)
* try catch block within save and reset functions

* pr fix

* Pr fix sessions

* Pr changes
2021-03-19 17:25:48 -07:00
Alan Ren
a5d2870344 make tenant list not tab focusable (#14809) 2021-03-19 17:03:30 -07:00
Kim Santiago
d658af153d fix publish profile with connection string ending in semicolon not loading (#14797) 2021-03-19 13:17:51 -07:00
Brian Bergeron
ce39b4bd19 Arc - Unit tests for deleting Postgres (#14502)
* tests for deleting postgres from overview page

* upgrade to azdata-test 1.5.0

* Fix api stubbing

Co-authored-by: Brian Bergeron <brberger@microsoft.com>
Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-03-19 13:12:26 -07:00
Alan Ren
69d593007e fix contrast issue of view container and view header (#14800)
* bring in vscode colors

* background color
2021-03-19 09:56:33 -07:00
csigs
8aa8cb2cc6 LEGO: check in for main to temporary branch. (#14779) 2021-03-19 09:14:34 -07:00
Kim Santiago
adc0f3e96d fix build errors in projectController.test.ts (#14798) 2021-03-18 18:31:53 -07:00
Kim Santiago
8068de5938 fix project tree tests failing on windows (#14759)
* fix project tree tests failing on windows

* add back test

* Addressing comments

* change to doc comment

* remove unnecessary change

* undo other change
2021-03-18 17:14:00 -07:00
Charles Gagnon
c65c856d2f Fix API issues for external extensions (#14796)
* Fix azdata API break

* Move dialogwidth to azdata.d.ts

* re-add parameter

* add comment back
2021-03-18 16:46:32 -07:00
Charles Gagnon
31ce58a8fc Cleanup telemetry keys (#14795)
* Add event for connection error

* Cleanup telemetry keys

* Fix missed keys
2021-03-18 15:52:57 -07:00
Lucy Zhang
af19f93b78 clean up sqlnotebookmanager (#14793) 2021-03-18 15:01:14 -07:00
Christopher Suh
172f428789 Fixed formatting/bugs (#14790)
* formatting changes & assessment title

* fixed width for description/impacted objects

* fixed formatting

* removed master, model, tempdb, and msdb

* fix warnings number

* used static variable
2021-03-18 14:33:17 -07:00
Charles Gagnon
a0a97d1611 Revert "Updated Postgres Spec for where to find engine version, removed calling -ev in edit commands (#14735)" (#14794)
This reverts commit 318559dcd7.
2021-03-18 14:19:59 -07:00
Charles Gagnon
11d4ea3232 Promote some proposed APIs to stable (#14788)
* Promote some proposed APIs to stable

* Fix spelling

(cherry picked from commit b88a9ba33e644f83a2b8deac9d99d86803605a85)
2021-03-18 13:11:43 -07:00
Barbara Valdez
5da4690be8 fix relative paths in wysiwyg (#14743)
* Convert relative paths to absolute paths based on the notebook uri before converting to markdown
2021-03-18 10:24:15 -07:00
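An illustrative sketch of the conversion described above; the helper name and the vscode-uri usage are hypothetical, since the actual notebook code is not shown here.

import * as path from 'path';
import { URI } from 'vscode-uri';

// Leave absolute paths and full URIs (http:, data:, ...) alone; resolve everything else
// against the directory containing the notebook file.
function toAbsolutePath(notebookUri: URI, target: string): string {
	if (path.isAbsolute(target) || /^[a-z][a-z0-9+.-]*:/i.test(target)) {
		return target;
	}
	return path.resolve(path.dirname(notebookUri.fsPath), target);
}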
Charles Gagnon
88b7960d01 Add Slider component (#14774)
* initial

* more cleanup

* update types
2021-03-18 09:47:36 -07:00
Zi Chen
03d2ac7206 Recursively delete folders in database projects (#14740)
* Recursively rmdir

* Remove comment

* Add test

* Add test assertions

* Comment

* Recursively rmdir

* Revert "Comment"

This reverts commit 287e7f824adfb2fcedb18ee222b34b8c6a5305b5.

* Re-commit Simplify Test
2021-03-18 09:20:41 -07:00
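A minimal sketch of the recursive-delete idea using only Node's fs.promises API; the project code likely differs in naming and error handling.

import { promises as fs } from 'fs';
import * as path from 'path';

async function rmdirRecursive(dir: string): Promise<void> {
	// Delete children first (depth-first), then remove the now-empty directory.
	for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
		const full = path.join(dir, entry.name);
		if (entry.isDirectory()) {
			await rmdirRecursive(full);
		} else {
			await fs.unlink(full);
		}
	}
	await fs.rmdir(dir);
}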
Charles Gagnon
07bd534453 Update replicas text (#14783) 2021-03-17 16:19:03 -07:00
nasc17
947633f244 Clear updates in discard (#14781) 2021-03-17 16:15:45 -07:00
Alan Ren
de91969b14 declarative table color (#14775) 2021-03-17 15:32:47 -07:00
Aasim Khan
c7cca7e9f0 Renaming controller to migration service and other bug fixes/ validations. (#14751)
* - Added coming soon message for learn more.
- Potential fix for learn more message

* Renaming of controller to sqlMigrationService

* Surfacing some errors
-Azure account is stale error
-Migration Service creation error.

* Adding refresh azure token validation.

* Fixing some errors pointed out during PR
-Fixing property names
-Fixing count

* Fixing migration status
- Adding special error handling for resource not found error
- Deleting unfound migrations from local cache
- Using prefetched migration status for view all

Misc fixes:
- Using SQL server version name instead of number
- Fixing Icons on sku recommendation page
- Fixing table column width in cutover dialog
- Adding spinner button to refresh.

* Fixing all strings in migration service page and dialog

* fixed a string error in create service dialog
2021-03-17 14:55:24 -07:00
Christopher Suh
5917f869ef Update version number for next release (#14772) 2021-03-17 14:58:51 -04:00
Kim Santiago
f4d305ec16 vBump dacpac and sql database projects (#14768) 2021-03-17 11:14:48 -07:00
Christopher Suh
3d78fe02a4 Update CHANGELOG.md (#14766)
* Update CHANGELOG.md

* Update CHANGELOG.md
2021-03-17 13:51:15 -04:00
Christopher Suh
04d9b54c49 Updating README for March Release (#14752) 2021-03-17 13:42:26 -04:00
Jonny Johansson
a12f85dd02 Add missing "Data Direction"-option to line charts (#14764) 2021-03-17 10:24:28 -07:00
Charles Gagnon
7ffa7a9b23 Add support for replicas option when deploying MIAA (#14758) 2021-03-17 10:12:26 -07:00
Charles Gagnon
4425b2afd3 Fix MIAA connection strings page not populating (#14763) 2021-03-17 09:40:47 -07:00
Kim Santiago
a4fae7c1d8 fix database name edit box not announcing required (#14753) 2021-03-16 16:04:56 -07:00
Alan Ren
150dec7211 expose the resizable option for table column (#14757) 2021-03-16 15:45:25 -07:00
Charles Gagnon
89e5ee3c80 Fix BDC deployment removing all security settings from config (#14756) 2021-03-16 15:44:21 -07:00
Barbara Valdez
784d76b886 Remove book notebook from toc (#14704)
* Add remove notebook from book
2021-03-16 15:02:29 -07:00
Kim Santiago
ff766a8a14 update workspace opening popup to be modal (#14741) 2021-03-16 11:36:10 -07:00
Charles Gagnon
95fa61ee4e Activate extensions contributing deployment providers (#14750) 2021-03-16 11:11:13 -07:00
Charles Gagnon
0ecc053377 Fix blank MIAA/PG deployment wizard pages (#14714) 2021-03-16 10:05:47 -07:00
Charles Gagnon
97813b4c20 vBump arc (#14748) 2021-03-16 08:29:40 -07:00
nasc17
318559dcd7 Updated Postgres Spec for where to find engine version, removed calling -ev in edit commands (#14735)
* Added spec.engine.version, took out calling engine version with edit calls

* Added text in wrong place

* missed updates

* PR fix
2021-03-16 08:25:16 -07:00
Charles Gagnon
7a4852b047 Add Product Feedback survey link to welcome page (#14705)
* Fix welcome page

* Add link
2021-03-15 16:23:56 -07:00
Drew Skwiers-Koballa
9beb7804e6 sqlproj - keep new object input box open when user navigates UI 2021-03-15 13:00:01 -07:00
Sakshi Sharma
0209b55ea2 Change no workspace content text (#14726)
* Change no workspace content text

* Address comment
2021-03-15 11:48:27 -07:00
Karl Burtram
05d88f4971 Fix script loading in sandbox windows (#14727)
* Fix script loading in sandbox windows

* Avoid uglify error
2021-03-15 11:41:41 -07:00
Justin M
4140d0da79 Bumped sqltoolservice version to 82 and kusto package version to 0.5.1 (#14718) 2021-03-15 09:40:52 -07:00
Charles Gagnon
b3e141a150 Update missed connection management return values (#14722) 2021-03-15 09:24:43 -07:00
Sakshi Sharma
9df7c6c1aa Added integration tests for schema compare (#14479)
* Added integration tests for schema compare

* Addressed comments

* Revert beforeEach change

* Addressed comments and reverted table creation change

* Removed cancelCompare test and addressed a comment

* Increase test timeout from 5 to 10mins
2021-03-14 11:27:16 -07:00
Alan Ren
7a9de9e708 vbump asde extension (#14720) 2021-03-12 17:26:25 -08:00
nasc17
42347b4681 Pass in empty string quotation (#14717) 2021-03-12 15:50:41 -08:00
Charles Gagnon
63828dcc15 Remove connectivity mode from Arc deployment (#14710) 2021-03-12 14:26:57 -08:00
nasc17
98828aa67d Unit test for Postgres Model (#14651)
* Figure out initial test

* First tries at refresh unit test

* refresh unit test completed

* Connection profile unit test, need to pass in false

* Engine settings start

* All passing

* Clean up

* Removed class, updated showdialog stubs, using sinon spy

* Removed comment
2021-03-12 14:02:55 -08:00
Aasim Khan
d6cd3270a8 Fixing azdata engine version for import (#14666) 2021-03-12 13:17:48 -08:00
Charles Gagnon
90a5d9d817 Fix filtered resource options persisting across deployments (#14702) 2021-03-12 13:08:13 -08:00
Charles Gagnon
138fa3feb6 Update azdata login namespace param (#14670) 2021-03-12 12:01:55 -08:00
Charles Gagnon
7ea09d3d17 Fix welcome page (#14701) 2021-03-12 11:17:19 -08:00
Alex Ma
8acc28e344 Update for extension XLFs (#14650)
* updates to extension XLFs

* revert dacpac

* dacpac updated
2021-03-12 10:55:23 -08:00
Alan Ren
f83925ce38 fix vm creation issue (#14691) 2021-03-12 10:53:02 -08:00
Charles Gagnon
b670ffc7e6 Update azdata login namespace param (#14670) 2021-03-12 10:48:32 -08:00
Aasim Khan
0277054b1f Vbump and vscode engine requirements update for SQL migration extension (#14665)
* Updated version and vscode engine requirements

* bumping the right engine

* changing vscode engine version to *

* reverting version to 0.0.3
2021-03-11 22:37:17 -08:00
Charles Gagnon
1c671676bf Disable auto sync service (#14677)
* Disable auto sync service

* skip tests
2021-03-11 17:28:01 -08:00
Leila Lali
063953f743 Bug fixes for ML 0.8.0 (#14659)
* Bug fixes for ML 0.8.0
2021-03-11 14:49:28 -08:00
Charles Gagnon
bba3b93c6e Move BDC resource deployment type into BDC extension (#14657) 2021-03-11 10:33:21 -08:00
Charles Gagnon
cf862854c5 Update return types that can return undefined (#14660) 2021-03-11 10:09:50 -08:00
Charles Gagnon
6c877a4e20 Update commands and param names (#14658) 2021-03-11 07:29:19 -08:00
Aasim Khan
29c02e5746 Resetting subscriptions after AD tenant is changed in migration wizard (#14656)
* Resetting subscriptions after AD tenant is changed

* removing migration in progress info message at the end

* printing migration started errors in console.
2021-03-10 18:23:39 -08:00
Aasim Khan
50ac3b0fdf Fixing welcome page tour. (#14640)
* Fixing the icon names

* Fixing settings card position

* getting icon selectors from their source

* Changing var name from dataExplorer to dataExplorerId
2021-03-10 15:55:31 -08:00
Aditya Bist
46f805a8be fix a few more tags (#14648) 2021-03-10 15:10:08 -08:00
Charles Gagnon
e97429cb74 Bump arc version (#14653) 2021-03-10 15:03:14 -08:00
Kim Santiago
03a0d71486 Fix deploy upgrade existing not choosing the correct database (#14635)
* fix deploy upgrade existing not choosing the correct database

* fix test

* specify databaseDropdownValue as a string
2021-03-10 15:02:14 -08:00
Aasim Khan
ee2988f5fd Adding Azure AD tenant dropdown in migration wizard (#14637)
* Added dropdown to select azure tenant in accounts page

* Added dropdown label for tenant dropdown

* Moving deepcopy to utils
Exporting tenant type from azurecore

* Removing unnecessary stylings

* removing unnecessary async
2021-03-10 12:48:44 -08:00
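The deep-copy helper mentioned above is not shown in this log; a common minimal version for plain, JSON-serializable data looks like the following sketch.

// Sketch only: adequate for plain data objects (no functions, Dates, Maps, or cycles).
export function deepClone<T>(obj: T): T {
	return obj === undefined ? obj : JSON.parse(JSON.stringify(obj));
}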
Christopher Suh
6389a5b0b0 Target Page Updates (#14581)
* fixed target page links

* wip

* wip

* update target page values

* wip

* fixed number of dbs

* add vm assessment dialog
2021-03-10 02:45:46 -05:00
Aasim Khan
bfa8ec0301 Adding database specific settings, cancel migration and other dark UI fixes (#14626)
* Made dashboard dark ui compat

* foundations for sql vm

* WIP

* Added cancel migration
Added refresh migration table
Added multi db config

* disabling cancel migration button if the migration is not in progress.

* Addressing some PR-based comments
- Removing (s) from loc strings
- Adding return type to cancel migration
- removing _ from public vars
- localizing strings
- Adding name to dialogs for telemetry

* Adding todo comment for offline mode
2021-03-09 23:40:52 -08:00
Alan Ren
c05ba883c4 handle query gallery with tags (#14633)
* handle query gallery with tags

* fix typo
2021-03-09 19:19:48 -08:00
Charles Gagnon
c33116c625 Remove Online Services Settings option (#14624) 2021-03-09 17:20:07 -08:00
nasc17
973f151e07 Add a note that opening new support request from ADS works only after uploading configuration to the cloud/portal (#14601)
* Added note to support request page

* Fixed note
2021-03-09 15:09:21 -08:00
Barbara Valdez
4b462a41ab Queue concurrent calls when initializing contents in BookModel (#14586)
* Initial work for handling concurrent operations when initializing books

* fixes to init contents

* create a queue of deferred promises for initializing books

* resolve active promise and set to undefined

* remove duplicated variable

* address pr comments
2021-03-09 10:19:30 -08:00
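An illustrative sketch of the serialization pattern described above (not the actual BookModel code): each initializeContents call queues behind the previous one so concurrent callers cannot race.

class ContentInitializer {
	private queue: Promise<void> = Promise.resolve();

	initializeContents(doWork: () => Promise<void>): Promise<void> {
		// Each caller waits for earlier callers to finish before doing its own work.
		const result = this.queue.then(doWork);
		// Keep the chain alive even if doWork rejects, so later callers still run.
		this.queue = result.catch(() => { /* swallow so the chain continues */ });
		return result;
	}
}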
Charles Gagnon
0906030aa2 Expose AzureAccount typings to other extensions (#14604) 2021-03-09 10:16:52 -08:00
Vladimir Chernov
46dd49d69a SqlAssessment azdata version update (#14613) 2021-03-09 10:09:11 -08:00
dependabot[bot]
a4297a2922 Bump elliptic from 6.5.3 to 6.5.4 (#14600)
Bumps [elliptic](https://github.com/indutny/elliptic) from 6.5.3 to 6.5.4.
- [Release notes](https://github.com/indutny/elliptic/releases)
- [Commits](https://github.com/indutny/elliptic/compare/v6.5.3...v6.5.4)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-03-09 08:57:29 -08:00
dependabot[bot]
447e573911 Bump elliptic in /extensions/markdown-language-features (#14609)
Bumps [elliptic](https://github.com/indutny/elliptic) from 6.5.3 to 6.5.4.
- [Release notes](https://github.com/indutny/elliptic/releases)
- [Commits](https://github.com/indutny/elliptic/compare/v6.5.3...v6.5.4)

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-03-09 08:53:14 -08:00
Charles Gagnon
b5e66a715f Update connection event properties (#14608) 2021-03-09 08:39:14 -08:00
Cory Rivera
e2a5859155 Only include package versions in Manage Packages dialog if they're supported for the user's version of Python (#14584) 2021-03-08 18:05:10 -08:00
Charles Gagnon
bbdc324f17 Add PyZMQ third party license (#14607) 2021-03-08 17:04:26 -08:00
Jeff Trimmer
0bea923c0a Change tenant to tenant.id when passing to azdata.accounts.getAccountSecurityToken() method. (#14588) 2021-03-08 15:58:04 -08:00
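A hedged example of the fix named in this commit title; the parameter order of azdata.accounts.getAccountSecurityToken is assumed from the public typings rather than taken from the diff.

import * as azdata from 'azdata';

async function fetchToken(account: azdata.Account, tenant: { id: string }, resource: azdata.AzureResource) {
	// Before: the whole tenant object was passed where a tenant id string is expected.
	// After this fix: pass only tenant.id.
	return azdata.accounts.getAccountSecurityToken(account, tenant.id, resource);
}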
Hale Rankin
809d35f173 Manage Packages icon position fix (#14528)
* Added CSS to treat manage packages icon position.

* Wired up a rough concept showing how I propose to pass a masked-icon class, along with the icon name - derived from the action icon path - to the injected action prior to rendering. The end result is an injected action with an icon which behaves like the others in the notebook toolbar.

* Revert "Added CSS to treat manage packages icon position."

This reverts commit 215a67244a78224fe8fd2e6480b7e50d37a15dc0.

* Revert "Wired up a rough concept showing how I propose to pass a masked-icon class, along with the icon name - derived from the action icon path - to the injected action prior to rendering. The end result is an injected action with an icon which behaves like the others in the notebook toolbar."

This reverts commit 1e0cf116602192aa554334d564e855167b0e8bb6.

* Modified LabeledMenuItemActionItem to add masked-icon class to injected label element.

* Modified LabeledMenuItemActionItem to add masked-icon class and styles for injected label element.

* To prevent conflict with Dashboard, I duplicated the existing LabeledMenuItemActionItem and modified it.

* Added comment to new method. Added non-prefixed mask-image style.

* Renamed method and modified comments.
2021-03-08 15:34:49 -08:00
Alex Ma
16556b8316 Info prompt for results encoding when saving to JSON (#14533)
* encoding message prompt added.

* added don't show again prompt

* global scope added

* utf8 warning added.

* Remove extra space
2021-03-08 14:05:05 -08:00
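A rough sketch of the prompt pattern described above (an informational message with a persisted "don't show again" choice at user scope); the setting name and message text are hypothetical.

import * as vscode from 'vscode';

const SUPPRESS_KEY = 'queryEditor.results.suppressJsonEncodingInfo'; // hypothetical setting name

async function maybeShowEncodingInfo(): Promise<void> {
	const config = vscode.workspace.getConfiguration();
	if (config.get<boolean>(SUPPRESS_KEY)) {
		return;
	}
	const dontShowAgain = "Don't show again";
	const choice = await vscode.window.showInformationMessage(
		'Results are saved to JSON using UTF-8 encoding.', dontShowAgain);
	if (choice === dontShowAgain) {
		// Persist at global (user) scope, as the commit notes.
		await config.update(SUPPRESS_KEY, true, vscode.ConfigurationTarget.Global);
	}
}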
Karl Burtram
9d1db372f0 Update package.json (#14597) 2021-03-08 10:29:05 -08:00
nasc17
dba5880f35 Postgres Resource Health Page (#14575)
* Add podstatus to spec

* Added image to table and fixed spacing.

* Added pod status to spec

* PR fixes

* Added resource health page, created overview box

* Pod condition table is up

* Trying to fix how table refreshes

* Fixed how drop down changes table

* Overview box shows number of running and pending pods

* overview box refresh fix

* Updated summary section

* PR fixes

* Condensed create pod list function

* Added enum

* fixed refresh

* Fixed refresh, fixed if all available section add
2021-03-08 10:05:11 -08:00
3593 changed files with 366144 additions and 224386 deletions

View File

@@ -14,3 +14,4 @@
**/extensions/**/build/**
**/extensions/markdown-language-features/media/**
**/extensions/typescript-basics/test/colorize-fixtures/**
**/extensions/**/dist/**

View File

@@ -939,6 +939,13 @@
"*" // node modules
]
},
{
"target": "**/test/monaco/**",
"restrictions": [
"**/test/monaco/**",
"*" // node modules
]
},
{
"target": "**/api/**.test.ts",
"restrictions": [
@@ -946,9 +953,9 @@
"assert",
"sinon",
"crypto",
"vscode",
"typemoq",
"azdata"
"vscode",
"typemoq",
"azdata"
]
},
{

.github/copycat.yml vendored (5 changed lines)
View File

@@ -1,5 +0,0 @@
{
perform: false,
target_owner: 'anthonydresser',
target_repo: 'testissues'
}

.github/workflows/build-chat.yml vendored Normal file (35 changed lines)
View File

@@ -0,0 +1,35 @@
name: "Build Chat"
on:
workflow_run:
workflows:
- CI
types:
- completed
branches:
- master
- release/*
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that arent needed for the rest of the actions
run: npm install @azure/storage-blob@12.1.1
- name: Build Chat
uses: ./actions/build-chat
with:
token: ${{ secrets.GITHUB_TOKEN }}
slack_token: ${{ secrets.SLACK_TOKEN }}
storage_connection_string: ${{ secrets.BUILD_CHAT_STORAGE_CONNECTION_STRING }}
workflow_run_url: ${{ github.event.workflow_run.url }}
notification_channel: build
log_channel: bot-log

View File

@@ -13,7 +13,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
ref: v40
ref: v42
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions

View File

@@ -11,7 +11,7 @@ jobs:
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
ref: v40
ref: v42
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions

View File

@@ -14,7 +14,7 @@ jobs:
with:
repository: "microsoft/vscode-github-triage-actions"
path: ./actions
ref: v40
ref: v42
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Storage Module

View File

@@ -1,24 +0,0 @@
name: On Issue Open
on:
issues:
types: [opened]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2.2.0
with:
repository: 'microsoft/azuredatastudio'
ref: main
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions/build/actions
- name: Run CopyCat
uses: ./actions/build/actions/copycat
with:
token: ${{secrets.TRIAGE_PAT}}
owner: anthonydresser
repo: testissues

.gitignore vendored (22 changed lines)
View File

@@ -5,26 +5,8 @@ Thumbs.db
node_modules/
.build/
extensions/**/dist/
out/
out-build/
out-editor/
out-editor-src/
out-editor-build/
out-editor-esm/
out-editor-esm-bundle/
out-editor-min/
out-monaco-editor-core/
out-vscode/
out-vscode-min/
out-vscode-reh/
out-vscode-reh-min/
out-vscode-reh-pkg/
out-vscode-reh-web/
out-vscode-reh-web-min/
out-vscode-reh-web-pkg/
out-vscode-web/
out-vscode-web-min/
out-vscode-web-pkg/
/out*/
/extensions/**/out/
src/vs/server
resources/server
build/node_modules

View File

@@ -3,7 +3,6 @@
// for the documentation about the extensions.json format
"recommendations": [
"dbaeumer.vscode-eslint",
"EditorConfig.EditorConfig",
"msjsdiag.debugger-for-chrome"
"EditorConfig.EditorConfig"
]
}

.vscode/launch.json vendored (19 changed lines)
View File

@@ -105,8 +105,7 @@
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"browserLaunchLocation": "workspace",
"preLaunchTask": "Ensure Prelaunch Dependencies",
"browserLaunchLocation": "workspace"
},
{
"type": "node",
@@ -141,7 +140,6 @@
"type": "pwa-chrome",
"request": "launch",
"outFiles": [],
"outFiles": [],
"perScriptSourcemaps": "yes",
"name": "VS Code (Web, Chrome)",
"url": "http://localhost:8080",
@@ -215,7 +213,7 @@
"${workspaceFolder}/out/**/*.js"
],
"cascadeTerminateToConfigurations": [
"Attach to VS Code"
"Attach to azuredatastudio"
],
"env": {
"MOCHA_COLORS": "true"
@@ -229,15 +227,15 @@
"request": "launch",
"name": "Run Unit Tests For Current File",
"program": "${workspaceFolder}/test/unit/electron/index.js",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Code - OSS.app/Contents/MacOS/Electron",
"runtimeExecutable": "${workspaceFolder}/.build/electron/Azure Data Studio.app/Contents/MacOS/Electron",
"windows": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/Code - OSS.exe"
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio.exe"
},
"linux": {
"runtimeExecutable": "${workspaceFolder}/.build/electron/code-oss"
"runtimeExecutable": "${workspaceFolder}/.build/electron/azuredatastudio"
},
"cascadeTerminateToConfigurations": [
"Attach to VS Code"
"Attach to azuredatastudio"
],
"outputCapture": "std",
"args": [
@@ -251,9 +249,6 @@
],
"env": {
"MOCHA_COLORS": "true"
},
"presentation": {
"hidden": true
}
},
{
@@ -356,7 +351,7 @@
{
"name": "Debug Unit Tests (Current File)",
"configurations": [
"Attach to VS Code",
"Attach to azuredatastudio",
"Run Unit Tests For Current File"
],
"presentation": {

View File

@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"November 2020\"",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"February 2021\"",
"editable": true
},
{

View File

@@ -8,8 +8,8 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"November 2020\"",
"editable": false
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"January 2021\"",
"editable": true
},
{
"kind": 1,
@@ -80,7 +80,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed",
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed -label:verified",
"editable": true
},
{
@@ -89,10 +89,28 @@
"value": "# Verification",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Verifiable Fixes",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified -label:unreleased",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Unreleased Fixes",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified label:unreleased",
"editable": true
},
{
@@ -104,7 +122,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE label:candidate",
"value": "$REPOS $MILESTONE is:open label:candidate",
"editable": true
}
]

View File

@@ -176,17 +176,20 @@
{
"kind": 1,
"language": "markdown",
"value": "# vscode-pull-request-github"
"value": "# vscode-pull-request-github",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode-pull-request-github is:issue closed:>$since"
"value": "repo:microsoft/vscode-pull-request-github is:issue closed:>$since",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode-test is:issue created:>$since"
"value": "repo:microsoft/vscode-test is:issue created:>$since",
"editable": true
},
{
"kind": 1,

View File

@@ -8,8 +8,8 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"November 2020\"\n\n$MINE=assignee:@me",
"editable": false
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"January 2021\"\n\n$MINE=assignee:@me",
"editable": true
},
{
"kind": 1,
@@ -122,7 +122,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open",
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:endgame-plan",
"editable": true
},
{
@@ -152,7 +152,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug label:verification-steps-needed",
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-steps-needed",
"editable": true
},
{
@@ -164,7 +164,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug label:verification-found",
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-found",
"editable": true
},
{
@@ -176,7 +176,19 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Issues filed from outside team",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15",
"editable": true
},
{
@@ -188,7 +200,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"editable": true
},
{

View File

@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"November 2020\"",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"February 2021\"",
"editable": true
},
{
@@ -83,6 +83,24 @@
"value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\"",
"editable": false
},
{
"kind": 1,
"language": "markdown",
"value": "### Personal Inbox\n",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "\n#### Missing Type label",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "$repos assignee:@me is:open type:issue -label:bug -label:\"needs more info\" -label:feature-request -label:under-discussion -label:debt -label:plan-item -label:upstream",
"editable": true
},
{
"kind": 1,
"language": "markdown",

View File

@@ -0,0 +1,44 @@
[
{
"kind": 1,
"language": "markdown",
"value": "## Papercuts\n\nThis notebook serves as an ongoing collection of papercut issues that we encounter while dogfooding. With that in mind only promote issues that really turn you off, e.g. issues that make you want to stop using VS Code or its extensions. To mark an issue (bug, feature-request, etc.) as papercut add the labels: `papercut :drop_of_blood:`",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## All Papercuts\n\nThese are all papercut issues that we encounter while dogfooding vscode or extensions that we author.",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open -label:notebook label:\"papercut :drop_of_blood:\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "## Native Notebook",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open label:notebook label:\"papercut :drop_of_blood:\"",
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "### My Papercuts",
"editable": true
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open assignee:@me label:\"papercut :drop_of_blood:\"",
"editable": true
}
]

View File

@@ -14,7 +14,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"October 2020\"",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"January 2021\"",
"editable": true
},
{

.vscode/tasks.json vendored (8 changed lines)
View File

@@ -62,7 +62,8 @@
"group": {
"kind": "build",
"isDefault": true
}
},
"problemMatcher": []
},
{
"type": "npm",
@@ -90,7 +91,8 @@
"Kill Build VS Code Core",
"Kill Build VS Code Extensions"
],
"group": "build"
"group": "build",
"problemMatcher": []
},
{
"type": "npm",
@@ -217,7 +219,7 @@
"base": "$tsc",
"applyTo": "allDocuments",
"owner": "tsec"
},
}
],
"group": "build",
"label": "npm: tsec-compile-check",

View File

@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers"
target "9.4.3"
target "11.2.2"
runtime "electron"

View File

@@ -1,5 +1,30 @@
# Change Log
## Version 1.28.0
* Release date: April 16, 2021
* Release status: General Availability
* New Notebook Features:
* Added Add Notebook and Remove Notebook commands
* Extension Updates:
* SQL Database Projects
* Schema Compare
* Bug Fixes
## Version 1.27.0
* Release date: March 17, 2021
* Release status: General Availability
* New Notebook Features:
* Added create book dialog
* Extension Updates:
* Import
* Dacpac
* Machine Learning
* SQL Assessment
* Arc
* SQL Database Projects
* ASDE Deployment
* Bug Fixes
## Version 1.26.1
* Release date: February 25, 2021
* Release status: General Availability

View File

@@ -131,10 +131,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the [Source EULA](LICENSE.txt).
[win-user]: https://go.microsoft.com/fwlink/?linkid=2154985
[win-system]: https://go.microsoft.com/fwlink/?linkid=2155159
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2155221
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2155096
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2154986
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2155222
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2155223
[win-user]: https://go.microsoft.com/fwlink/?linkid=2160781
[win-system]: https://go.microsoft.com/fwlink/?linkid=2160780
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2160923
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2160874
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2160782
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2160875
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2160876

View File

@@ -12,7 +12,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios
axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar
@@ -39,7 +39,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
jschardet: https://github.com/aadsm/jschardet
jupyter-powershell: https://github.com/vors/jupyter-powershell
JupyterLab: https://github.com/jupyterlab/jupyterlab
keytar: https://github.com/atom/node-keytar
keytar: https://github.com/atom/node-keytar
make-error: https://github.com/JsCommunity/make-error
mark.js: https://github.com/julmot/mark.js
minimist: https://github.com/substack/minimist
@@ -54,7 +54,8 @@ expressly granted herein, whether by implication, estoppel or otherwise.
primeng: https://github.com/primefaces/primeng
process-nextick-args: https://github.com/calvinmetcalf/process-nextick-args
pty.js: https://github.com/chjj/pty.js
qs: https://github.com/ljharb/qs
pyzmq: https://github.com/zeromq/pyzmq
qs: https://github.com/ljharb/qs
reflect-metadata: https://github.com/rbuckton/reflect-metadata
request: https://github.com/request/request
rxjs: https://github.com/ReactiveX/RxJS
@@ -1548,6 +1549,40 @@ THE SOFTWARE.
=========================================
END OF pty.js NOTICES AND INFORMATION
%% PyZMQ NOTICES AND INFORMATION BEGIN HERE
=========================================
Copyright (c) 2009-2012, Brian Granger, Min Ragan-Kelley
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of PyZMQ nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
=========================================
END OF pyzmq NOTICES AND INFORMATION
%% reflect-metadata NOTICES AND INFORMATION BEGIN HERE
=========================================
Apache License

View File

@@ -20,3 +20,8 @@ jobs:
vmImage: macOS-latest
steps:
- template: build/azure-pipelines/darwin/continuous-build-darwin.yml
trigger:
branches:
exclude:
- electron-11.x.y

View File

@@ -1 +1 @@
2020-04-29T05:20:58.491Z
2021-04-07T00:04:17.775Z

View File

@@ -1,4 +1,6 @@
# cleanup rules for native node modules, .gitignore style
# cleanup rules for node modules, .gitignore style
# native node modules
nan/**
*/node_modules/nan/**
@@ -46,6 +48,7 @@ spdlog/build/**
spdlog/deps/**
spdlog/src/**
spdlog/test/**
spdlog/*.yml
!spdlog/build/Release/*.node
jschardet/dist/**
@@ -72,6 +75,7 @@ node-pty/build/**
node-pty/src/**
node-pty/tools/**
node-pty/deps/**
node-pty/scripts/**
!node-pty/build/Release/*.exe
!node-pty/build/Release/*.dll
!node-pty/build/Release/*.node
@@ -117,8 +121,55 @@ vsda/README.md
vsda/targets
!vsda/build/Release/vsda.node
vscode-encrypt/build/**
vscode-encrypt/src/**
vscode-encrypt/vendor/**
vscode-encrypt/.gitignore
vscode-encrypt/binding.gyp
vscode-encrypt/README.md
!vscode-encrypt/build/Release/vscode-encrypt-native.node
vscode-windows-ca-certs/**/*
!vscode-windows-ca-certs/package.json
!vscode-windows-ca-certs/**/*.node
node-addon-api/**/*
# other node modules
**/docs/**
**/example/**
**/examples/**
**/test/**
**/tests/**
**/History.md
**/CHANGELOG.md
**/README.md
**/readme.md
**/readme.markdown
**/*.ts
!typescript/**/*.d.ts
jschardet/dist/**
es6-promise/lib/**
vscode-textmate/webpack.config.js
# {{SQL CARBON EDIT }} We need more than just zone-node.js
# zone.js/dist/**
# !zone.js/dist/zone-node.js
# https://github.com/xtermjs/xterm.js/issues/3137
xterm/src/**
xterm/tsconfig.all.json
# https://github.com/xtermjs/xterm.js/issues/3138
xterm-addon-*/src/**
xterm-addon-*/fixtures/**
xterm-addon-*/out/**
xterm-addon-*/out-test/**

View File

@@ -1,15 +0,0 @@
name: Copycat
description: Copy all new issues to a different repo
inputs:
token:
description: GitHub token with issue, comment, and label read/write permissions to both repos
default: ${{ github.token }}
owner:
description: account/organization that owns the destination repo (the microsoft part of microsoft/vscode)
required: true
repo:
description: name of the destination repo (the vscode part of microsoft/vscode)
required: true
runs:
using: 'node12'
main: 'index.js'

View File

@@ -1,19 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
class CopyCat {
constructor(github, owner, repo) {
this.github = github;
this.owner = owner;
this.repo = repo;
}
async run() {
const issue = await this.github.getIssue();
console.log(`Mirroring issue \`${issue.title}\` to ${this.owner}/${this.repo}`);
await this.github.createIssue(this.owner, this.repo, issue.title, issue.body.replace(/@|#|issues/g, '-'));
}
}
exports.CopyCat = CopyCat;

View File

@@ -1,21 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { GitHubIssue } from '../api/api'
export class CopyCat {
constructor(private github: GitHubIssue, private owner: string, private repo: string) {}
async run() {
const issue = await this.github.getIssue()
console.log(`Mirroring issue \`${issue.title}\` to ${this.owner}/${this.repo}`)
await this.github.createIssue(
this.owner,
this.repo,
issue.title,
issue.body.replace(/@|#|issues/g, '-'),
)
}
}

View File

@@ -1,21 +0,0 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const core = require("@actions/core");
const github_1 = require("@actions/github");
const octokit_1 = require("../api/octokit");
const utils_1 = require("../utils/utils");
const copyCat_1 = require("./copyCat");
const token = utils_1.getRequiredInput('token');
const main = async () => {
await new copyCat_1.CopyCat(new octokit_1.OctoKitIssue(token, github_1.context.repo, { number: github_1.context.issue.number }), utils_1.getRequiredInput('owner'), utils_1.getRequiredInput('repo')).run();
};
main()
.then(() => utils_1.logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message);
await utils_1.logErrorToIssue(error.message, true, token);
});

View File

@@ -1,27 +0,0 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import * as core from '@actions/core'
import { context } from '@actions/github'
import { OctoKitIssue } from '../api/octokit'
import { getRequiredInput, logErrorToIssue, logRateLimit } from '../utils/utils'
import { CopyCat } from './copyCat'
const token = getRequiredInput('token')
const main = async () => {
await new CopyCat(
new OctoKitIssue(token, context.repo, { number: context.issue.number }),
getRequiredInput('owner'),
getRequiredInput('repo'),
).run()
}
main()
.then(() => logRateLimit(token))
.catch(async (error) => {
core.setFailed(error.message)
await logErrorToIssue(error.message, true, token)
})

View File

@@ -1,2 +0,0 @@
node_modules/
*.js

View File

@@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
const { dirs } = require('../../npm/dirs');
const ROOT = path.join(__dirname, '../../../');
const shasum = crypto.createHash('sha1');
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `yarn.lock` files
for (let dir of dirs) {
const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
shasum.update(fs.readFileSync(yarnLockPath));
}
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));

View File

@@ -0,0 +1,32 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
const { dirs } = require('../../npm/dirs');
const ROOT = path.join(__dirname, '../../../');
const shasum = crypto.createHash('sha1');
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Add `yarn.lock` files
for (let dir of dirs) {
const yarnLockPath = path.join(ROOT, dir, 'yarn.lock');
shasum.update(fs.readFileSync(yarnLockPath));
}
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));

View File

@@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const vfs = require("vinyl-fs");
const path = require("path");
const es = require("event-stream");
const fs = require("fs");
const files = [
'.build/extensions/**/*.vsix',
'.build/win32-x64/**/*.{exe,zip}',
'.build/linux/sha256hashes.txt',
'.build/linux/deb/amd64/deb/*.deb',
'.build/linux/rpm/x86_64/*.rpm',
'.build/linux/server/*',
'.build/linux/archive/*',
'.build/docker/*',
'.build/darwin/*',
'.build/version.json' // version information
];
async function main() {
return new Promise((resolve, reject) => {
const stream = vfs.src(files, { base: '.build', allowEmpty: true })
.pipe(es.through(file => {
const filePath = path.join(process.env.BUILD_ARTIFACTSTAGINGDIRECTORY,
//Preserve intermediate directories after .build folder
file.path.substr(path.resolve('.build').length + 1));
fs.mkdirSync(path.dirname(filePath), { recursive: true });
fs.renameSync(file.path, filePath);
}));
stream.on('end', () => resolve());
stream.on('error', e => reject(e));
});
}
main().catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,94 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 6) {
console.error('Usage: node createAsset.js PLATFORM TYPE NAME FILE');
process.exit(-1);
}
function hashStream(hashName, stream) {
return new Promise((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
async function doesAssetExist(blobService, quality, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, filePath, fileName) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(filePath),
contentDisposition: `attachment; filename="${fileName}"`,
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, filePath, blobOptions, err => err ? e(err) : c()));
}
function getEnv(name) {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
async function main() {
const [, , platform, type, fileName, filePath] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
console.log('Creating asset...');
const stat = await new Promise((c, e) => fs.stat(filePath, (err, stat) => err ? e(err) : c(stat)));
const size = stat.size;
console.log('Size:', size);
const stream = fs.createReadStream(filePath);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + fileName;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const blobExists = await doesAssetExist(blobService, quality, blobName);
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobService, quality, blobName, filePath, fileName);
console.log('Blobs successfully uploaded.');
const asset = {
platform,
type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}
main().then(() => {
console.log('Asset successfully created');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});
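The ./retry module imported by these build scripts isn't included in this excerpt; a typical minimal helper of that kind looks like the following sketch (an assumption, not the project's actual implementation).

// Retry an async operation a fixed number of times with a flat delay between attempts.
export async function retry<T>(fn: () => Promise<T>, attempts = 5, delayMs = 1000): Promise<T> {
	let lastError: unknown;
	for (let i = 1; i <= attempts; i++) {
		try {
			return await fn();
		} catch (err) {
			lastError = err;
			console.warn(`Attempt ${i}/${attempts} failed, retrying in ${delayMs}ms`);
			await new Promise(resolve => setTimeout(resolve, delayMs));
		}
	}
	throw lastError;
}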

View File

@@ -11,6 +11,7 @@ import * as crypto from 'crypto';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';
interface Asset {
platform: string;
@@ -121,7 +122,7 @@ async function main(): Promise<void> {
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await scripts.storedProcedure('createAsset').execute('', [commit, asset, true]);
await retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}
main().then(() => {

View File

@@ -0,0 +1,51 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
if (process.argv.length !== 3) {
console.error('Usage: node createBuild.js VERSION');
process.exit(-1);
}
function getEnv(name) {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
async function main() {
const [, , _version] = process.argv;
const quality = getEnv('VSCODE_QUALITY');
const commit = getEnv('BUILD_SOURCEVERSION');
const queuedBy = getEnv('BUILD_QUEUEDBY');
const sourceBranch = getEnv('BUILD_SOURCEBRANCH');
const version = _version + (quality === 'stable' ? '' : `-${quality}`);
console.log('Creating build...');
console.log('Quality:', quality);
console.log('Version:', version);
console.log('Commit:', commit);
const build = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: false,
sourceBranch,
queuedBy,
assets: [],
updates: {}
};
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});

View File

@@ -6,6 +6,7 @@
'use strict';
import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';
if (process.argv.length !== 3) {
console.error('Usage: node createBuild.js VERSION');
@@ -48,7 +49,7 @@ async function main(): Promise<void> {
const client = new CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!, key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]);
await retry(() => scripts.storedProcedure('createBuild').execute('', [{ ...build, _partitionKey: '' }]));
}
main().then(() => {

View File

@@ -10,8 +10,8 @@ git clone --depth 1 https://github.com/Microsoft/vscode-node-debug2.git
git clone --depth 1 https://github.com/Microsoft/vscode-node-debug.git
git clone --depth 1 https://github.com/Microsoft/vscode-html-languageservice.git
git clone --depth 1 https://github.com/Microsoft/vscode-json-languageservice.git
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/build/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --sourceDir $BUILD_SOURCESDIRECTORY --excludedDir $BUILD_SOURCESDIRECTORY/extensions --outputDir . --applyEndpoints
node $BUILD_SOURCESDIRECTORY/node_modules/.bin/vscode-telemetry-extractor --config $BUILD_SOURCESDIRECTORY/build/azure-pipelines/common/telemetry-config.json -o .
mkdir -p $BUILD_SOURCESDIRECTORY/.build/telemetry
mv declarations-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-core.json
mv config-resolved.json $BUILD_SOURCESDIRECTORY/.build/telemetry/telemetry-extensions.json

View File

@@ -0,0 +1,14 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const retry_1 = require("./retry");
const { installBrowsersWithProgressBar } = require('playwright/lib/install/installer');
const playwrightPath = path.dirname(require.resolve('playwright'));
async function install() {
await retry_1.retry(() => installBrowsersWithProgressBar(playwrightPath));
}
install();

View File

@@ -0,0 +1,40 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
if (process.argv.length !== 3) {
console.error('Usage: node listNodeModules.js OUTPUT_FILE');
process.exit(-1);
}
const ROOT = path.join(__dirname, '../../../');
function findNodeModulesFiles(location, inNodeModules, result) {
const entries = fs.readdirSync(path.join(ROOT, location));
for (const entry of entries) {
const entryPath = `${location}/${entry}`;
if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
continue;
}
let stat;
try {
stat = fs.statSync(path.join(ROOT, entryPath));
}
catch (err) {
continue;
}
if (stat.isDirectory()) {
findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result);
}
else {
if (inNodeModules) {
result.push(entryPath.substr(1));
}
}
}
}
const result = [];
findNodeModulesFiles('', false, result);
fs.writeFileSync(process.argv[2], result.join('\n') + '\n');

View File

@@ -0,0 +1,46 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
if (process.argv.length !== 3) {
console.error('Usage: node listNodeModules.js OUTPUT_FILE');
process.exit(-1);
}
const ROOT = path.join(__dirname, '../../../');
function findNodeModulesFiles(location: string, inNodeModules: boolean, result: string[]) {
const entries = fs.readdirSync(path.join(ROOT, location));
for (const entry of entries) {
const entryPath = `${location}/${entry}`;
if (/(^\/out)|(^\/src$)|(^\/.git$)|(^\/.build$)/.test(entryPath)) {
continue;
}
let stat: fs.Stats;
try {
stat = fs.statSync(path.join(ROOT, entryPath));
} catch (err) {
continue;
}
if (stat.isDirectory()) {
findNodeModulesFiles(entryPath, inNodeModules || (entry === 'node_modules'), result);
} else {
if (inNodeModules) {
result.push(entryPath.substr(1));
}
}
}
}
const result: string[] = [];
findNodeModulesFiles('', false, result);
fs.writeFileSync(process.argv[2], result.join('\n') + '\n');

View File

@@ -0,0 +1,71 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const path_1 = require("path");
const fileNames = [
'fake.html',
'host.js',
'index.html',
'main.js',
'service-worker.js'
];
async function assertContainer(blobService, container) {
await new Promise((c, e) => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesBlobExist(blobService, container, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(container, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, container, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(container, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, files) {
console.log('Publishing...');
console.log('Commit:', commit);
const storageAccount = process.env['AZURE_WEBVIEW_STORAGE_ACCOUNT'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_WEBVIEW_STORAGE_ACCESS_KEY'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = path_1.basename(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
continue;
}
console.log('Uploading blob to Azure storage...');
await uploadBlob(blobService, commit, blobName, file);
}
console.log('Blobs successfully uploaded.');
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => path_1.join(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);
});
}
if (process.argv.length < 3) {
console.error('Usage: node publish.js <directory>');
process.exit(-1);
}
main();
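
The inline new Promise((c, e) => ...) wrappers around azure-storage callbacks recur throughout these scripts. A generic helper along these lines would express the pattern once (a sketch, not code from the repo):

// Sketch: promisify a single Node-style callback call, as done inline above.
function toPromise<T = void>(run: (cb: (err: any, result?: T) => void) => void): Promise<T> {
    return new Promise<T>((resolve, reject) => {
        run((err, result) => err ? reject(err) : resolve(result as T));
    });
}

// Example against the same azure-storage calls (assumes a BlobService instance named blobService):
// await toPromise(cb => blobService.createContainerIfNotExists(container, { publicAccessLevel: 'blob' }, cb));
// const res = await toPromise<{ exists?: boolean }>(cb => blobService.doesBlobExist(container, blobName, cb));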

View File

@@ -0,0 +1,224 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const crypto = require("crypto");
const azure = require("azure-storage");
const mime = require("mime");
const minimist = require("minimist");
const documentdb_1 = require("documentdb");
// {{SQL CARBON EDIT}}
if (process.argv.length < 9) {
console.error('Usage: node publish.js <product_quality> <platform> <file_type> <file_name> <version> <is_update> <file> [commit_id]');
process.exit(-1);
}
function hashStream(hashName, stream) {
return new Promise((c, e) => {
const shasum = crypto.createHash(hashName);
stream
.on('data', shasum.update.bind(shasum))
.on('error', e)
.on('close', () => c(shasum.digest('hex')));
});
}
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
function getConfig(quality) {
console.log(`Getting config for quality ${quality}`);
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return retry(() => new Promise((c, e) => {
client.queryDocuments(collection, query, { enableCrossPartitionQuery: true }).toArray((err, results) => {
if (err && err.code !== 409) {
return e(err);
}
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
});
}));
}
function createOrUpdate(commit, quality, platform, type, release, asset, isUpdate) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const updateQuery = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function update() {
updateTries++;
return new Promise((c, e) => {
console.log(`Querying existing documents to update...`);
client.queryDocuments(collection, updateQuery, { enableCrossPartitionQuery: true }).toArray((err, results) => {
if (err) {
return e(err);
}
if (results.length !== 1) {
return e(new Error('No documents'));
}
const release = results[0];
release.assets = [
...release.assets.filter((a) => !(a.platform === platform && a.type === type)),
asset
];
if (isUpdate) {
release.updates[platform] = type;
}
console.log(`Replacing existing document with updated version`);
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) {
return c(update());
}
if (err) {
return e(err);
}
console.log('Build successfully updated.');
c();
});
});
});
}
return retry(() => new Promise((c, e) => {
console.log(`Attempting to create document`);
client.createDocument(collection, release, err => {
if (err && err.code === 409) {
return c(update());
}
if (err) {
return e(err);
}
console.log('Build successfully published.');
c();
});
}));
}
async function assertContainer(blobService, quality) {
await new Promise((c, e) => blobService.createContainerIfNotExists(quality, { publicAccessLevel: 'blob' }, err => err ? e(err) : c()));
}
async function doesAssetExist(blobService, quality, blobName) {
const existsResult = await new Promise((c, e) => blobService.doesBlobExist(quality, blobName, (err, r) => err ? e(err) : c(r)));
return existsResult.exists;
}
async function uploadBlob(blobService, quality, blobName, file) {
const blobOptions = {
contentSettings: {
contentType: mime.lookup(file),
cacheControl: 'max-age=31536000, public'
}
};
await new Promise((c, e) => blobService.createBlockBlobFromLocalFile(quality, blobName, file, blobOptions, err => err ? e(err) : c()));
}
async function publish(commit, quality, platform, type, name, version, _isUpdate, file, opts) {
const isUpdate = _isUpdate === 'true';
const queuedBy = process.env['BUILD_QUEUEDBY'];
const sourceBranch = process.env['BUILD_SOURCEBRANCH'];
console.log('Publishing...');
console.log('Quality:', quality);
console.log('Platform:', platform);
console.log('Type:', type);
console.log('Name:', name);
console.log('Version:', version);
console.log('Commit:', commit);
console.log('Is Update:', isUpdate);
console.log('File:', file);
const stat = await new Promise((c, e) => fs.stat(file, (err, stat) => err ? e(err) : c(stat)));
const size = stat.size;
console.log('Size:', size);
const stream = fs.createReadStream(file);
const [sha1hash, sha256hash] = await Promise.all([hashStream('sha1', stream), hashStream('sha256', stream)]);
console.log('SHA1:', sha1hash);
console.log('SHA256:', sha256hash);
const blobName = commit + '/' + name;
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, quality);
const blobExists = await doesAssetExist(blobService, quality, blobName);
if (blobExists) {
console.log(`Blob ${quality}, ${blobName} already exists, not publishing again.`);
return;
}
console.log('Uploading blobs to Azure storage...');
await uploadBlob(blobService, quality, blobName, file);
console.log('Blobs successfully uploaded.');
const config = await getConfig(quality);
console.log('Quality config:', config);
const asset = {
platform: platform,
type: type,
url: `${process.env['AZURE_CDN_URL']}/${quality}/${blobName}`,
hash: sha1hash,
sha256hash,
size
};
// Remove this if we ever need to rollback fast updates for windows
if (/win32/.test(platform)) {
asset.supportsFastUpdate = true;
}
console.log('Asset:', JSON.stringify(asset, null, ' '));
// {{SQL CARBON EDIT}}
// Insiders: nightly build from main
const isReleased = (((quality === 'insider' && /^main$|^refs\/heads\/main$/.test(sourceBranch)) ||
(quality === 'rc1' && /^release\/|^refs\/heads\/release\//.test(sourceBranch))) &&
/Project Collection Service Accounts|Microsoft.VisualStudio.Services.TFS/.test(queuedBy));
const release = {
id: commit,
timestamp: (new Date()).getTime(),
version,
isReleased: isReleased,
sourceBranch,
queuedBy,
assets: [],
updates: {}
};
if (!opts['upload-only']) {
release.assets.push(asset);
if (isUpdate) {
release.updates[platform] = type;
}
}
await createOrUpdate(commit, quality, platform, type, release, asset, isUpdate);
}
const RETRY_TIMES = 10;
async function retry(fn) {
for (let run = 1; run <= RETRY_TIMES; run++) {
try {
return await fn();
}
catch (err) {
if (!/ECONNRESET/.test(err.message)) {
throw err;
}
console.log(`Caught error ${err} - ${run}/${RETRY_TIMES}`);
}
}
throw new Error('Retried too many times');
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
console.warn('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const opts = minimist(process.argv.slice(2), {
boolean: ['upload-only']
});
const [quality, platform, type, name, version, _isUpdate, file] = opts._;
publish(commit, quality, platform, type, name, version, _isUpdate, file, opts).catch(err => {
console.error(err);
process.exit(1);
});
}
main();
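
For orientation, the documents this script reads and writes have roughly the following shape (field names are taken from the code above; the interface declarations themselves are illustrative and do not exist in the repo):

// Illustrative only: inferred from the publish script above.
interface Asset {
    platform: string;
    type: string;
    url: string;
    hash: string;                   // SHA-1 of the uploaded file
    sha256hash: string;
    size: number;
    supportsFastUpdate?: boolean;   // currently set for win32 assets only
    mooncakeUrl?: string;           // filled in later by sync-mooncake.js
}

interface Release {
    id: string;                     // commit hash
    timestamp: number;
    version: string;
    isReleased: boolean;
    sourceBranch: string;
    queuedBy: string;
    assets: Asset[];
    updates: { [platform: string]: string };  // platform -> asset type used for updates
}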

View File

@@ -0,0 +1,91 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const documentdb_1 = require("documentdb");
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
function getConfig(quality) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/config';
const query = {
query: `SELECT TOP 1 * FROM c WHERE c.id = @quality`,
parameters: [
{ name: '@quality', value: quality }
]
};
return new Promise((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err && err.code !== 409) {
return e(err);
}
c(!results || results.length === 0 ? createDefaultConfig(quality) : results[0]);
});
});
}
function doRelease(commit, quality) {
const client = new documentdb_1.DocumentClient(process.env['AZURE_DOCUMENTDB_ENDPOINT'], { masterKey: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const collection = 'dbs/builds/colls/' + quality;
const query = {
query: 'SELECT TOP 1 * FROM c WHERE c.id = @id',
parameters: [{ name: '@id', value: commit }]
};
let updateTries = 0;
function update() {
updateTries++;
return new Promise((c, e) => {
client.queryDocuments(collection, query).toArray((err, results) => {
if (err) {
return e(err);
}
if (results.length !== 1) {
return e(new Error('No documents'));
}
const release = results[0];
release.isReleased = true;
client.replaceDocument(release._self, release, err => {
if (err && err.code === 409 && updateTries < 5) {
return c(update());
}
if (err) {
return e(err);
}
console.log('Build successfully updated.');
c();
});
});
});
}
return update();
}
async function release(commit, quality) {
const config = await getConfig(quality);
console.log('Quality config:', config);
if (config.frozen) {
console.log(`Skipping release because quality ${quality} is frozen.`);
return;
}
await doRelease(commit, quality);
}
function env(name) {
const result = process.env[name];
if (!result) {
throw new Error(`Skipping release due to missing env: ${name}`);
}
return result;
}
async function main() {
const commit = env('BUILD_SOURCEVERSION');
const quality = env('VSCODE_QUALITY');
await release(commit, quality);
}
main().catch(err => {
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,50 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
function getEnv(name) {
const result = process.env[name];
if (typeof result === 'undefined') {
throw new Error('Missing env: ' + name);
}
return result;
}
function createDefaultConfig(quality) {
return {
id: quality,
frozen: false
};
}
async function getConfig(client, quality) {
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${quality}"`;
const res = await client.database('builds').container('config').items.query(query).fetchAll();
if (res.resources.length === 0) {
return createDefaultConfig(quality);
}
return res.resources[0];
}
async function main() {
const commit = getEnv('BUILD_SOURCEVERSION');
const quality = getEnv('VSCODE_QUALITY');
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const config = await getConfig(client, quality);
console.log('Quality config:', config);
if (config.frozen) {
console.log(`Skipping release because quality ${quality} is frozen.`);
return;
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');
process.exit(0);
}, err => {
console.error(err);
process.exit(1);
});
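
The frozen flag read here is what blocks a release; a quality is frozen by editing its config document. A minimal sketch of doing that with the same Cosmos SDK (illustrative only, and it assumes an upsert keyed by id is valid for the config container's partitioning):

// Sketch only: freeze or unfreeze a quality by upserting its config document.
// Assumes the same 'builds' database / 'config' container used by getConfig above.
import { CosmosClient } from '@azure/cosmos';

async function setFrozen(quality: string, frozen: boolean): Promise<void> {
    const client = new CosmosClient({
        endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT']!,
        key: process.env['AZURE_DOCUMENTDB_MASTERKEY']!
    });
    await client.database('builds').container('config').items.upsert({ id: quality, frozen });
    console.log(`Quality ${quality} is now ${frozen ? 'frozen' : 'unfrozen'}`);
}

// setFrozen('insider', true).catch(err => { console.error(err); process.exit(1); });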

View File

@@ -6,6 +6,7 @@
'use strict';
import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';
function getEnv(name: string): string {
const result = process.env[name];
@@ -58,7 +59,7 @@ async function main(): Promise<void> {
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await scripts.storedProcedure('releaseBuild').execute('', [commit]);
await retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {

View File

@@ -0,0 +1,25 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
exports.retry = void 0;
async function retry(fn) {
for (let run = 1; run <= 10; run++) {
try {
return await fn();
}
catch (err) {
if (!/ECONNRESET/.test(err.message)) {
throw err;
}
const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);
// maximum delay is 10th retry: ~3 seconds
await new Promise(c => setTimeout(c, millis));
}
}
throw new Error('Retried too many times');
}
exports.retry = retry;

View File

@@ -0,0 +1,26 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
export async function retry<T>(fn: () => Promise<T>): Promise<T> {
for (let run = 1; run <= 10; run++) {
try {
return await fn();
} catch (err) {
if (!/ECONNRESET/.test(err.message)) {
throw err;
}
const millis = (Math.random() * 200) + (50 * Math.pow(1.5, run));
console.log(`Failed with ECONNRESET, retrying in ${millis}ms...`);
// maximum delay is 10th retry: ~3 seconds
await new Promise(c => setTimeout(c, millis));
}
}
throw new Error('Retried too many times');
}
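
A short usage sketch (hypothetical caller): only errors whose message contains ECONNRESET are retried, everything else is rethrown immediately.

import { retry } from './retry';

// Hypothetical flaky call: fails with ECONNRESET a couple of times, then succeeds.
let attempts = 0;
async function flakyCall(): Promise<string> {
    if (++attempts < 3) {
        throw new Error('read ECONNRESET');
    }
    return 'ok';
}

retry(() => flakyCall()).then(
    result => console.log(`Succeeded after ${attempts} attempts: ${result}`),
    err => { console.error(err); process.exit(1); }
);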

View File

@@ -0,0 +1,47 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const path = require("path");
const crypto = require("crypto");
const ROOT = path.join(__dirname, '../../../');
function findFiles(location, pattern, result) {
const entries = fs.readdirSync(path.join(ROOT, location));
for (const entry of entries) {
const entryPath = `${location}/${entry}`;
let stat;
try {
stat = fs.statSync(path.join(ROOT, entryPath));
}
catch (err) {
continue;
}
if (stat.isDirectory()) {
findFiles(entryPath, pattern, result);
}
else {
if (stat.isFile() && entry.endsWith(pattern)) {
result.push(path.join(ROOT, entryPath));
}
}
}
}
const shasum = crypto.createHash('sha1');
/**
 * Create a SHA-1 hash of all the files that can cause packages to change or be re-downloaded.
*/
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Adding all yarn.lock files into sha sum.
const result = [];
findFiles('', 'yarn.lock', result);
result.forEach(f => shasum.update(fs.readFileSync(f)));
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));

View File

@@ -0,0 +1,54 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as fs from 'fs';
import * as path from 'path';
import * as crypto from 'crypto';
const ROOT = path.join(__dirname, '../../../');
function findFiles(location: string, pattern: string, result: string[]) {
const entries = fs.readdirSync(path.join(ROOT, location));
for (const entry of entries) {
const entryPath = `${location}/${entry}`;
let stat: fs.Stats;
try {
stat = fs.statSync(path.join(ROOT, entryPath));
} catch (err) {
continue;
}
if (stat.isDirectory()) {
findFiles(entryPath, pattern, result);
} else {
if (stat.isFile() && entry.endsWith(pattern)) {
result.push(path.join(ROOT, entryPath));
}
}
}
}
const shasum = crypto.createHash('sha1');
/**
 * Create a SHA-1 hash of all the files that can cause packages to change or be re-downloaded.
*/
shasum.update(fs.readFileSync(path.join(ROOT, 'build/.cachesalt')));
shasum.update(fs.readFileSync(path.join(ROOT, '.yarnrc')));
shasum.update(fs.readFileSync(path.join(ROOT, 'remote/.yarnrc')));
// Adding all yarn.lock files into sha sum.
const result: string[] = [];
findFiles('', 'yarn.lock', result);
result.forEach(f => shasum.update(fs.readFileSync(f)));
// Add any other command line arguments
for (let i = 2; i < process.argv.length; i++) {
shasum.update(process.argv[i]);
}
process.stdout.write(shasum.digest('hex'));
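
The pipelines below run this script with the target architecture and the Terrapin flag, write its output to .build/yarnlockhash, and use that value in the Cache@2 key. A hypothetical local helper to reproduce the key when debugging cache misses:

// Hypothetical debugging helper: reproduce the cache key the darwin/linux pipelines compute via
// node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN
import { execFileSync } from 'child_process';

function computeYarnLockHash(arch: string, enableTerrapin: string): string {
    return execFileSync('node',
        ['build/azure-pipelines/common/computeNodeModulesCacheKey.js', arch, enableTerrapin],
        { encoding: 'utf8' });
}

console.log('nodeModules cache key suffix:', computeYarnLockHash('x64', 'false'));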

View File

@@ -0,0 +1,87 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const url = require("url");
const azure = require("azure-storage");
const mime = require("mime");
const cosmos_1 = require("@azure/cosmos");
const retry_1 = require("./retry");
function log(...args) {
console.log(...[`[${new Date().toISOString()}]`, ...args]);
}
function error(...args) {
console.error(...[`[${new Date().toISOString()}]`, ...args]);
}
if (process.argv.length < 3) {
error('Usage: node sync-mooncake.js <quality>');
process.exit(-1);
}
async function sync(commit, quality) {
log(`Synchronizing Mooncake assets for ${quality}, ${commit}...`);
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const container = client.database('builds').container(quality);
const query = `SELECT TOP 1 * FROM c WHERE c.id = "${commit}"`;
const res = await container.items.query(query, {}).fetchAll();
if (res.resources.length !== 1) {
throw new Error(`No builds found for ${commit}`);
}
const build = res.resources[0];
log(`Found build for ${commit}, with ${build.assets.length} assets`);
const storageAccount = process.env['AZURE_STORAGE_ACCOUNT_2'];
const blobService = azure.createBlobService(storageAccount, process.env['AZURE_STORAGE_ACCESS_KEY_2'])
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
const mooncakeBlobService = azure.createBlobService(storageAccount, process.env['MOONCAKE_STORAGE_ACCESS_KEY'], `${storageAccount}.blob.core.chinacloudapi.cn`)
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
// mooncake is fussy and far away, this is needed!
blobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
mooncakeBlobService.defaultClientRequestTimeoutInMs = 10 * 60 * 1000;
for (const asset of build.assets) {
try {
const blobPath = url.parse(asset.url).path;
if (!blobPath) {
throw new Error(`Failed to parse URL: ${asset.url}`);
}
const blobName = blobPath.replace(/^\/\w+\//, '');
log(`Found ${blobName}`);
if (asset.mooncakeUrl) {
log(` Already in Mooncake ✔️`);
continue;
}
const readStream = blobService.createReadStream(quality, blobName, undefined);
const blobOptions = {
contentSettings: {
contentType: mime.lookup(blobPath),
cacheControl: 'max-age=31536000, public'
}
};
const writeStream = mooncakeBlobService.createWriteStreamToBlockBlob(quality, blobName, blobOptions, undefined);
log(` Uploading to Mooncake...`);
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
log(` Updating build in DB...`);
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
log(` Done ✔️`);
}
catch (err) {
error(err);
}
}
log(`All done ✔️`);
}
function main() {
const commit = process.env['BUILD_SOURCEVERSION'];
if (!commit) {
error('Skipping publish due to missing BUILD_SOURCEVERSION');
return;
}
const quality = process.argv[2];
sync(commit, quality).catch(err => {
error(err);
process.exit(1);
});
}
main();
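
The blob name is recovered from each asset URL by dropping the leading quality segment of the path. The same extraction with the WHATWG URL API looks roughly like this (a sketch; the script itself uses the older url.parse, and the example URL is made up):

// Sketch: derive the container-relative blob name from an asset URL, equivalent to
// url.parse(asset.url).path.replace(/^\/\w+\//, '') above.
function blobNameFromAssetUrl(assetUrl: string): string {
    const blobPath = new URL(assetUrl).pathname;   // e.g. /insider/<commit>/<file>
    return blobPath.replace(/^\/\w+\//, '');       // strip the leading quality/container segment
}

// blobNameFromAssetUrl('https://cdn.example.com/insider/abc123/VSCode-darwin.zip')
// === 'abc123/VSCode-darwin.zip'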

View File

@@ -9,6 +9,7 @@ import * as url from 'url';
import * as azure from 'azure-storage';
import * as mime from 'mime';
import { CosmosClient } from '@azure/cosmos';
import { retry } from './retry';
function log(...args: any[]) {
console.log(...[`[${new Date().toISOString()}]`, ...args]);
@@ -99,8 +100,8 @@ async function sync(commit: string, quality: string): Promise<void> {
log(` Updating build in DB...`);
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await container.scripts.storedProcedure('setAssetMooncakeUrl')
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]);
await retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
log(` Done ✔️`);
} catch (err) {

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:

View File

@@ -1,10 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>com.apple.security.cs.allow-unsigned-executable-memory</key>
<true/>
<key>com.apple.security.cs.disable-library-validation</key>
<true/>
</dict>
</plist>

View File

@@ -1,32 +1,7 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
versionSpec: "12.18.3"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
@@ -34,9 +9,21 @@ steps:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
@@ -55,40 +42,45 @@ steps:
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
mkdir -p .build
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: Cache@2
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
npm install -g node-gyp@7.1.0
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
set -e
npm install -g node-gyp@latest
node-gyp --version
displayName: Update node-gyp
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp)
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
@@ -98,25 +90,19 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp)
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
@@ -126,10 +112,21 @@ steps:
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
yarn electron-rebuild
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
cd ./node_modules/keytar
node-gyp rebuild
displayName: Rebuild native modules for ARM64
condition: eq(variables['VSCODE_ARCH'], 'arm64')
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
- download: current
artifact: vscode-darwin-x64
displayName: Download x64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- download: current
artifact: vscode-darwin-arm64
displayName: Download arm64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
@@ -141,6 +138,7 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
displayName: Build
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
@@ -148,14 +146,23 @@ steps:
yarn gulp vscode-reh-darwin-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-darwin-min-ci
displayName: Build reh
displayName: Build Server
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
set -e
yarn electron $(VSCODE_ARCH)
displayName: Download Electron
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
unzip $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip -d $(agent.builddirectory)/vscode-x64
unzip $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/vscode-arm64
DEBUG=* node build/darwin/create-universal-app.js
displayName: Create Universal App
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
@@ -167,17 +174,26 @@ steps:
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js
displayName: Set Hardened Entitlements
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -191,13 +207,15 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -208,22 +226,39 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/smoke compile
displayName: Compile smoke tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME"
continueOnError: true
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME"
timeoutInMinutes: 5
displayName: Run smoke tests (Electron)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
yarn smoketest-no-compile --build "$APP_ROOT/$APP_NAME" --remote
timeoutInMinutes: 5
displayName: Run smoke tests (Remote)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
yarn smoketest --web --headless
continueOnError: true
yarn smoketest-no-compile --web --headless
timeoutInMinutes: 5
displayName: Run smoke tests (Browser)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
@@ -240,12 +275,13 @@ steps:
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: succeededOrFailed()
condition: and(succeededOrFailed(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
pushd $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH) && zip -r -X -y $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip * && popd
displayName: Archive build
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
@@ -270,10 +306,12 @@ steps:
]
SessionTimeout: 60
displayName: Codesign
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg"
displayName: Clean Archive
displayName: Clean
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
@@ -281,6 +319,7 @@ steps:
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
@@ -305,6 +344,7 @@ steps:
]
SessionTimeout: 60
displayName: Notarization
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
@@ -312,7 +352,7 @@ steps:
APP_NAME="`ls $APP_ROOT | head -n 1`"
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
displayName: Verify start after signing (export configuration)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
@@ -323,13 +363,29 @@ steps:
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/darwin/publish.sh
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
artifact: vscode-darwin-$(VSCODE_ARCH)
displayName: Publish archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
artifact: vscode-server-darwin-$(VSCODE_ARCH)
displayName: Publish server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin-web.zip
artifact: vscode-server-darwin-$(VSCODE_ARCH)-web
displayName: Publish web server archive
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
continueOnError: true
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0

View File

@@ -5,6 +5,7 @@ set -e
case $VSCODE_ARCH in
x64) ASSET_ID="darwin" ;;
arm64) ASSET_ID="darwin-arm64" ;;
universal) ASSET_ID="darwin-universal" ;;
esac
# publish the build

View File

@@ -5,26 +5,15 @@ steps:
certSecureFile: 'osx_signing_key.p12'
condition: eq(variables['signed'], true)
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
- task: DownloadPipelineArtifact@2
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
artifact: Compilation
displayName: Download compilation output
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
displayName: Extract compilation output
- task: NodeTool@0
inputs:
@@ -61,12 +50,23 @@ steps:
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- script: |
mkdir -p .build
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache key
- task: Cache@2
displayName: Restore Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules archive
- script: |
set -e
@@ -74,21 +74,21 @@ steps:
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
@@ -122,11 +122,16 @@ steps:
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
yarn gulp compile-extensions
displayName: Compile Extensions
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-darwin-x64
APP_NAME="`ls $APP_ROOT | head -n 1`"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log"
yarn smoketest --build "$APP_ROOT/$APP_NAME" --screenshots "$(build.artifactstagingdirectory)/smokeshots" --log "$(build.artifactstagingdirectory)/logs/darwin/smoke.log" --extensionsDir "$(build.sourcesdirectory)/extensions"
displayName: Run smoke tests (Electron)
continueOnError: true
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))

View File

@@ -11,7 +11,7 @@ pr:
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'

View File

@@ -11,7 +11,7 @@ pr:
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"

View File

@@ -6,12 +6,12 @@ RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.8
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++-4.9
RUN rm /usr/bin/gcc
RUN rm /usr/bin/g++
RUN ln -s /usr/bin/gcc-4.8 /usr/bin/gcc
RUN ln -s /usr/bin/g++-4.8 /usr/bin/g++
RUN ln -s /usr/bin/gcc-4.9 /usr/bin/gcc
RUN ln -s /usr/bin/g++-4.9 /usr/bin/g++
#docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -

View File

@@ -22,7 +22,7 @@ SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*.tar.*
rm -rf $ROOT/vscode-server-*-web.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"

View File

@@ -10,16 +10,21 @@ steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3
inputs:
versionSpec: "1.x"
- script: |
mkdir -p .build
node build/azure-pipelines/common/computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
@@ -31,7 +36,7 @@ steps:
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules # {{SQL CARBON EDIT}}
inputs:
keyfile: "build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock"
keyfile: ".yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules"
vstsFeed: "npm-cache" # {{SQL CARBON EDIT}} update build cache
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))

View File

@@ -1,28 +1,7 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -34,6 +13,17 @@ steps:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
- task: Docker@1
displayName: "Pull image"
inputs:
@@ -45,7 +35,6 @@ steps:
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
@@ -58,30 +47,35 @@ steps:
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n "alpine" > .build/arch
mkdir -p .build
node build/azure-pipelines/common/computeNodeModulesCacheKey.js "alpine" $ENABLE_TERRAPIN > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: Cache@2
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
@@ -90,21 +84,19 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
@@ -113,7 +105,7 @@ steps:
- script: |
set -e
docker run -e VSCODE_QUALITY -e CHILD_CONCURRENCY=1 -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
docker run -e VSCODE_QUALITY -v $(pwd):/root/vscode -v ~/.netrc:/root/.netrc vscodehub.azurecr.io/vscode-linux-build-agent:alpine /root/vscode/build/azure-pipelines/linux/alpine/install-dependencies.sh
displayName: Prebuild
- script: |
@@ -129,6 +121,17 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/linux/alpine/publish.sh
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine.tar.gz
artifact: vscode-server-linux-alpine
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-alpine-web.tar.gz
artifact: vscode-server-linux-alpine-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"

View File

@@ -1,28 +1,7 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -34,6 +13,17 @@ steps:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
- script: |
set -e
cat << EOF > ~/.netrc
@@ -48,31 +38,56 @@ steps:
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
echo -n $(VSCODE_ARCH) > .build/arch
mkdir -p .build
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: Cache@2
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
set -e
npm install -g node-gyp@latest
node-gyp --version
displayName: Update node-gyp
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
export CHILD_CONCURRENCY="1"
export npm_config_build_from_source=true
if [ -z "$CC" ] || [ -z "$CXX" ]; then
export CC=$(which gcc-5)
export CXX=$(which g++-5)
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
fi
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
@@ -81,21 +96,23 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
cd ./node_modules/keytar
npx node-gyp rebuild
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
@@ -106,28 +123,41 @@ steps:
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
displayName: Build
- script: |
set -e
service xvfb start
displayName: Start xvfb
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-linux-$(VSCODE_ARCH)-min-ci
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-reh-web-linux-$(VSCODE_ARCH)-min-ci
displayName: Build Server
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install"
displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn --cwd test/integration/browser compile
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -137,10 +167,12 @@ steps:
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./scripts/test-integration.sh --build --tfs "Integration Tests"
displayName: Run integration tests (Electron)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -148,16 +180,19 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-linux-$(VSCODE_ARCH)
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_APP_NAME="$APP_NAME" \
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
@@ -180,17 +215,20 @@ steps:
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-deb"
yarn gulp "vscode-linux-$(VSCODE_ARCH)-build-rpm"
displayName: Build deb, rpm packages
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
yarn gulp "vscode-linux-$(VSCODE_ARCH)-prepare-snap"
displayName: Prepare snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
# needed for code signing
- task: UseDotNet@2
displayName: "Install .NET Core SDK 2.x"
inputs:
version: 2.x
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
@@ -210,6 +248,7 @@ steps:
]
SessionTimeout: 120
displayName: Codesign rpm
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
@@ -219,12 +258,34 @@ steps:
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/linux/publish.sh
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(DEB_PATH)
artifact: vscode-linux-deb-$(VSCODE_ARCH)
displayName: Publish deb package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(RPM_PATH)
artifact: vscode-linux-rpm-$(VSCODE_ARCH)
displayName: Publish rpm package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH).tar.gz
artifact: vscode-server-linux-$(VSCODE_ARCH)
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-linux-$(VSCODE_ARCH)-web.tar.gz
artifact: vscode-server-linux-$(VSCODE_ARCH)-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: PublishPipelineArtifact@0
displayName: "Publish Pipeline Artifact"
inputs:
artifactName: "snap-$(VSCODE_ARCH)"
targetPath: .build/linux/snap-tarball
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"

View File

@@ -26,6 +26,17 @@ rm -rf $ROOT/vscode-server-*.tar.*
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME="vscode-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME="vscode-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME="vscode-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH="$ROOT/$SERVER_TARBALL_FILENAME"
rm -rf $ROOT/vscode-server-*-web.tar.*
(cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
node build/azure-pipelines/common/createAsset.js "server-$PLATFORM_LINUX-web" archive-unsigned "$SERVER_TARBALL_FILENAME" "$SERVER_TARBALL_PATH"
# Publish DEB
case $VSCODE_ARCH in
x64) DEB_ARCH="amd64" ;;
@@ -58,3 +69,7 @@ mkdir -p $REPO/.build/linux/snap-tarball
SNAP_TARBALL_PATH="$REPO/.build/linux/snap-tarball/snap-$VSCODE_ARCH.tar.gz"
rm -rf $SNAP_TARBALL_PATH
(cd .build/linux && tar -czf $SNAP_TARBALL_PATH snap)
# Export DEB_PATH, RPM_PATH
echo "##vso[task.setvariable variable=DEB_PATH]$DEB_PATH"
echo "##vso[task.setvariable variable=RPM_PATH]$RPM_PATH"

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -54,3 +54,11 @@ steps:
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js "linux-snap-$(VSCODE_ARCH)" package "$SNAP_FILENAME" "$SNAP_PATH"
# Export SNAP_PATH
echo "##vso[task.setvariable variable=SNAP_PATH]$SNAP_PATH"
- publish: $(SNAP_PATH)
artifact: vscode-linux-snap-$(VSCODE_ARCH)
displayName: Publish snap package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

View File

@@ -2,27 +2,6 @@ parameters:
extensionsToUnitTest: []
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
@@ -38,6 +17,16 @@ steps:
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
displayName: Extract compilation output
- script: |
set -e
cat << EOF > ~/.netrc
@@ -57,12 +46,23 @@ steps:
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- script: |
mkdir -p .build
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache key
- task: Cache@2
displayName: Restore Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules archive
- script: |
set -e
@@ -70,21 +70,21 @@ steps:
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
@@ -131,16 +131,19 @@ steps:
displayName: Run integration tests (Electron)
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- ${{each extension in parameters.extensionsToUnitTest}}:
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
export INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME"
export NO_CLEANUP=1
DISPLAY=:10 node ./scripts/test-extensions-unit.js ${{ extension }}
displayName: 'Run ${{ extension }} Stable Extension Unit Tests'
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
# Figure out the full absolute path of the product we just built
# including the remote server and configure the unit tests
# to run with these builds instead of running out of sources.
set -e
APP_ROOT=$(agent.builddirectory)/azuredatastudio-linux-x64
APP_NAME=$(node -p "require(\"$APP_ROOT/resources/app/product.json\").applicationName")
INTEGRATION_TEST_ELECTRON_PATH="$APP_ROOT/$APP_NAME" \
NO_CLEANUP=1 \
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/azuredatastudio-reh-linux-x64" \
DISPLAY=:10 ./scripts/test-extensions-unit.sh --build --tfs "Extension Unit Tests"
displayName: 'Run Stable Extension Unit Tests'
condition: and(succeeded(), eq(variables['RUN_TESTS'], 'true'))
- script: |
set -e
@@ -162,7 +165,6 @@ steps:
# Only archive directories we want for debugging purposes
tar -czvf $(Build.ArtifactStagingDirectory)/logs/linux-x64/$folder.tar.gz $folder/User $folder/logs
done
displayName: Archive Logs
continueOnError: true
condition: succeededOrFailed()
@@ -237,6 +239,13 @@ steps:
continueOnError: true
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: crash reports'
inputs:
PathtoPublish: '$(Build.SourcesDirectory)/.build/crashes'
ArtifactName: crashes
condition: and(succeededOrFailed(), eq(variables['RUN_TESTS'], 'true'))
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
condition: succeededOrFailed()

View File

@@ -1,4 +1,3 @@
trigger: none
pr: none
schedules:
@@ -8,11 +7,108 @@ schedules:
include:
- master
parameters:
- name: VSCODE_QUALITY
displayName: Quality
type: string
default: insider
values:
- exploration
- insider
- stable
- name: ENABLE_TERRAPIN
displayName: "Enable Terrapin"
type: boolean
default: true
- name: VSCODE_BUILD_WIN32
displayName: "🎯 Windows x64"
type: boolean
default: true
- name: VSCODE_BUILD_WIN32_32BIT
displayName: "🎯 Windows ia32"
type: boolean
default: true
- name: VSCODE_BUILD_WIN32_ARM64
displayName: "🎯 Windows arm64"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX
displayName: "🎯 Linux x64"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX_ARM64
displayName: "🎯 Linux arm64"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX_ARMHF
displayName: "🎯 Linux armhf"
type: boolean
default: true
- name: VSCODE_BUILD_LINUX_ALPINE
displayName: "🎯 Alpine Linux"
type: boolean
default: true
- name: VSCODE_BUILD_MACOS
displayName: "🎯 macOS x64"
type: boolean
default: true
- name: VSCODE_BUILD_MACOS_ARM64
displayName: "🎯 macOS arm64"
type: boolean
default: true
- name: VSCODE_BUILD_MACOS_UNIVERSAL
displayName: "🎯 macOS universal"
type: boolean
default: true
- name: VSCODE_BUILD_WEB
displayName: "🎯 Web"
type: boolean
default: true
- name: VSCODE_PUBLISH
displayName: "Publish to builds.code.visualstudio.com"
type: boolean
default: true
- name: VSCODE_RELEASE
displayName: "Release build if successful"
type: boolean
default: false
- name: VSCODE_COMPILE_ONLY
displayName: "Run Compile stage exclusively"
type: boolean
default: false
- name: VSCODE_STEP_ON_IT
displayName: "Skip tests"
type: boolean
default: false
variables:
- name: ENABLE_TERRAPIN
value: ${{ eq(parameters.ENABLE_TERRAPIN, true) }}
- name: VSCODE_QUALITY
value: ${{ parameters.VSCODE_QUALITY }}
- name: VSCODE_BUILD_STAGE_WINDOWS
value: ${{ or(eq(parameters.VSCODE_BUILD_WIN32, true), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}
- name: VSCODE_BUILD_STAGE_LINUX
value: ${{ or(eq(parameters.VSCODE_BUILD_LINUX, true), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true), eq(parameters.VSCODE_BUILD_WEB, true)) }}
- name: VSCODE_BUILD_STAGE_MACOS
value: ${{ or(eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}
- name: VSCODE_CIBUILD
value: ${{ in(variables['Build.Reason'], 'IndividualCI', 'BatchedCI') }}
- name: VSCODE_PUBLISH
value: ${{ and(eq(parameters.VSCODE_PUBLISH, true), eq(variables.VSCODE_CIBUILD, false)) }}
- name: VSCODE_SCHEDULEDBUILD
value: ${{ eq(variables['Build.Reason'], 'Schedule') }}
- name: VSCODE_STEP_ON_IT
value: ${{ eq(parameters.VSCODE_STEP_ON_IT, true) }}
- name: VSCODE_BUILD_MACOS_UNIVERSAL
value: ${{ and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_BUILD_MACOS, true), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true), eq(parameters.VSCODE_BUILD_MACOS_UNIVERSAL, true)) }}
resources:
containers:
- container: vscode-x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:x64
image: vscodehub.azurecr.io/vscode-linux-build-agent:bionic-x64
endpoint: VSCodeHub
options: --user 0:0
- container: vscode-arm64
image: vscodehub.azurecr.io/vscode-linux-build-agent:stretch-arm64
endpoint: VSCodeHub
@@ -26,177 +122,189 @@ stages:
- stage: Compile
jobs:
- job: Compile
pool:
vmImage: "Ubuntu-16.04"
container: vscode-x64
pool: compile
variables:
VSCODE_ARCH: x64
steps:
- template: product-compile.yml
- stage: Windows
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: VS2017-Win2016
jobs:
- job: Windows
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true)) }}:
- stage: Windows
dependsOn:
- Compile
pool:
vmImage: VS2017-Win2016
jobs:
- job: Windows32
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_32BIT'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml
- ${{ if eq(parameters.VSCODE_BUILD_WIN32, true) }}:
- job: Windows
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: win32/product-build-win32.yml
- job: WindowsARM64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WIN32_ARM64'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: win32/product-build-win32.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_32BIT, true)) }}:
- job: Windows32
timeoutInMinutes: 90
variables:
VSCODE_ARCH: ia32
steps:
- template: win32/product-build-win32.yml
- stage: Linux
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: vscode-x64
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
steps:
- template: linux/product-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WIN32_ARM64, true)) }}:
- job: WindowsARM64
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: win32/product-build-win32.yml
- job: LinuxSnap
dependsOn:
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_LINUX'], true)) }}:
- stage: Linux
dependsOn:
- Compile
pool:
vmImage: "Ubuntu-18.04"
jobs:
- ${{ if eq(parameters.VSCODE_BUILD_LINUX, true) }}:
- job: Linux
container: vscode-x64
variables:
VSCODE_ARCH: x64
NPM_ARCH: x64
steps:
- template: linux/product-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX, true)) }}:
- job: LinuxSnap
dependsOn:
- Linux
container: snapcraft
variables:
VSCODE_ARCH: x64
steps:
- template: linux/snap-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
- job: LinuxArmhf
container: vscode-armhf
variables:
VSCODE_ARCH: armhf
NPM_ARCH: armv7l
steps:
- template: linux/product-build-linux.yml
# TODO@joaomoreno: We don't ship ARM snaps for now
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARMHF, true)) }}:
- job: LinuxSnapArmhf
dependsOn:
- LinuxArmhf
container: snapcraft
variables:
VSCODE_ARCH: armhf
steps:
- template: linux/snap-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
- job: LinuxArm64
container: vscode-arm64
variables:
VSCODE_ARCH: arm64
NPM_ARCH: arm64
steps:
- template: linux/product-build-linux.yml
# TODO@joaomoreno: We don't ship ARM snaps for now
- ${{ if and(false, eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ARM64, true)) }}:
- job: LinuxSnapArm64
dependsOn:
- LinuxArm64
container: snapcraft
variables:
VSCODE_ARCH: arm64
steps:
- template: linux/snap-build-linux.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_LINUX_ALPINE, true)) }}:
- job: LinuxAlpine
steps:
- template: linux/product-build-alpine.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_WEB, true)) }}:
- job: LinuxWeb
variables:
VSCODE_ARCH: x64
steps:
- template: web/product-build-web.yml
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), eq(variables['VSCODE_BUILD_STAGE_MACOS'], true)) }}:
- stage: macOS
dependsOn:
- Compile
pool:
vmImage: macOS-latest
jobs:
- ${{ if eq(parameters.VSCODE_BUILD_MACOS, true) }}:
- job: macOS
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
- job: macOSARM64
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: darwin/product-build-darwin.yml
- ${{ if eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true) }}:
- job: macOSUniversal
dependsOn:
- macOS
- macOSARM64
timeoutInMinutes: 90
variables:
VSCODE_ARCH: universal
steps:
- template: darwin/product-build-darwin.yml
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
- stage: Mooncake
dependsOn:
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- Windows
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- Linux
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: x64
steps:
- template: linux/snap-build-linux.yml
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- macOS
condition: succeededOrFailed()
pool:
vmImage: "Ubuntu-18.04"
jobs:
- job: SyncMooncake
displayName: Sync Mooncake
steps:
- template: sync-mooncake.yml
- job: LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: vscode-armhf
variables:
VSCODE_ARCH: armhf
NPM_ARCH: armv7l
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnapArmhf
dependsOn:
- LinuxArmhf
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARMHF'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: armhf
steps:
- template: linux/snap-build-linux.yml
- job: LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: vscode-arm64
variables:
VSCODE_ARCH: arm64
NPM_ARCH: arm64
steps:
- template: linux/product-build-linux.yml
- job: LinuxSnapArm64
dependsOn:
- LinuxArm64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ARM64'], 'true'))
container: snapcraft
variables:
VSCODE_ARCH: arm64
steps:
- template: linux/snap-build-linux.yml
- job: LinuxAlpine
condition: and(succeeded(), eq(variables['VSCODE_BUILD_LINUX_ALPINE'], 'true'))
steps:
- template: linux/product-build-alpine.yml
- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'))
variables:
VSCODE_ARCH: x64
steps:
- template: web/product-build-web.yml
- stage: macOS
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin.yml
- stage: macOSARM64
dependsOn:
- Compile
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: macOS-latest
jobs:
- job: macOSARM64
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS_ARM64'], 'true'))
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: darwin/product-build-darwin.yml
- stage: Mooncake
dependsOn:
- Windows
- Linux
- macOS
- macOSARM64
condition: and(succeededOrFailed(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: SyncMooncake
displayName: Sync Mooncake
steps:
- template: sync-mooncake.yml
- stage: Publish
dependsOn:
- Windows
- Linux
- macOS
- macOSARM64
condition: and(succeeded(), eq(variables['VSCODE_COMPILE_ONLY'], 'false'), or(eq(variables['VSCODE_RELEASE'], 'true'), and(or(eq(variables['VSCODE_QUALITY'], 'insider'), eq(variables['VSCODE_QUALITY'], 'exploration')), eq(variables['Build.Reason'], 'Schedule'))))
pool:
vmImage: "Ubuntu-16.04"
jobs:
- job: BuildService
displayName: Build Service
steps:
- template: release.yml
- ${{ if and(eq(parameters.VSCODE_COMPILE_ONLY, false), or(eq(parameters.VSCODE_RELEASE, true), and(in(parameters.VSCODE_QUALITY, 'insider', 'exploration'), eq(variables['VSCODE_SCHEDULEDBUILD'], true)))) }}:
- stage: Release
dependsOn:
- ${{ if eq(variables['VSCODE_BUILD_STAGE_WINDOWS'], true) }}:
- Windows
- ${{ if eq(variables['VSCODE_BUILD_STAGE_LINUX'], true) }}:
- Linux
- ${{ if eq(variables['VSCODE_BUILD_STAGE_MACOS'], true) }}:
- macOS
pool:
vmImage: "Ubuntu-18.04"
jobs:
- job: ReleaseBuild
displayName: Release Build
steps:
- template: release.yml

View File

@@ -1,36 +1,17 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
dryRun: true
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
versionSpec: "1.x"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
@@ -43,24 +24,22 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
mkdir -p .build
echo -n $(VSCODE_ARCH) > .build/arch
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
@@ -68,11 +47,22 @@ steps:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
export CHILD_CONCURRENCY="1"
export npm_config_arch=$(NPM_ARCH)
if [ -z "$CC" ] || [ -z "$CXX" ]; then
export CC=$(which gcc-5)
export CXX=$(which g++-5)
fi
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
fi
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
@@ -81,50 +71,24 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['CacheRestored'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
# Mixin must run before optimize, because the CSS loader will inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
yarn gulp hygiene
yarn monaco-compile-check
yarn valid-layers-check
displayName: Run hygiene, monaco compile & valid layers checks
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
@@ -134,14 +98,13 @@ steps:
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
displayName: Compile
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
@@ -149,13 +112,16 @@ steps:
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
node build/azure-pipelines/common/createBuild.js $VERSION
displayName: Create build
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# we gotta tarball everything in order to preserve file permissions
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
displayName: Compress compilation artifact
- task: PublishPipelineArtifact@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
artifactName: Compilation
displayName: Publish compilation artifact

View File

@@ -1,2 +0,0 @@
node_modules/
*.js

View File

@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const cp = require("child_process");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
if (!isValidTag(tag)) {
throw Error(`Invalid tag ${tag}`);
}
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function isValidTag(t) {
if (t.split('.').length !== 3) {
return false;
}
const [major, minor, bug] = t.split('.');
// Only release for tags like 1.34.0
if (bug !== '0') {
return false;
}
if (isNaN(parseInt(major, 10)) || isNaN(parseInt(minor, 10))) {
return false;
}
return true;
}

View File

@@ -9,7 +9,7 @@ pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

View File

@@ -0,0 +1,72 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const fs = require("fs");
const cp = require("child_process");
const path = require("path");
let tag = '';
try {
tag = cp
.execSync('git describe --tags `git rev-list --tags --max-count=1`')
.toString()
.trim();
const dtsUri = `https://raw.githubusercontent.com/microsoft/vscode/${tag}/src/vs/vscode.d.ts`;
const outPath = path.resolve(process.cwd(), 'DefinitelyTyped/types/vscode/index.d.ts');
cp.execSync(`curl ${dtsUri} --output ${outPath}`);
updateDTSFile(outPath, tag);
console.log(`Done updating vscode.d.ts at ${outPath}`);
}
catch (err) {
console.error(err);
console.error('Failed to update types');
process.exit(1);
}
function updateDTSFile(outPath, tag) {
const oldContent = fs.readFileSync(outPath, 'utf-8');
const newContent = getNewFileContent(oldContent, tag);
fs.writeFileSync(outPath, newContent);
}
function repeat(str, times) {
const result = new Array(times);
for (let i = 0; i < times; i++) {
result[i] = str;
}
return result.join('');
}
function convertTabsToSpaces(str) {
return str.replace(/\t/gm, value => repeat('    ', value.length));
}
function getNewFileContent(content, tag) {
const oldheader = [
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA. See License.txt in the project root for license information.`,
` *--------------------------------------------------------------------------------------------*/`
].join('\n');
return convertTabsToSpaces(getNewFileHeader(tag) + content.slice(oldheader.length));
}
function getNewFileHeader(tag) {
const [major, minor] = tag.split('.');
const shorttag = `${major}.${minor}`;
const header = [
`// Type definitions for Visual Studio Code ${shorttag}`,
`// Project: https://github.com/microsoft/vscode`,
`// Definitions by: Visual Studio Code Team, Microsoft <https://github.com/Microsoft>`,
`// Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped`,
``,
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,
` * Type Definition for Visual Studio Code ${shorttag} Extension API`,
` * See https://code.visualstudio.com/api for more information`,
` */`
].join('\n');
return header;
}

View File

@@ -1,7 +1,7 @@
resources:
containers:
- container: linux-x64
image: sqltoolscontainers.azurecr.io/linux-build-agent:2
image: sqltoolscontainers.azurecr.io/linux-build-agent:3
endpoint: ContainerRegistry
jobs:

View File

@@ -1,19 +1,4 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
@@ -27,7 +12,6 @@ steps:
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
@@ -40,7 +24,6 @@ steps:
git config user.email "sqltools@service.microsoft.com"
git config user.name "AzureDataStudio"
displayName: Prepare tooling
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
@@ -48,34 +31,44 @@ steps:
git fetch distro
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules
- script: |
mkdir -p .build
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache key
- task: Cache@2
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore Cache - Node Modules
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules archive
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['NODE_MODULES_RESTORED'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
condition: and(succeeded(), eq(variables['NODE_MODULES_RESTORED'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
@@ -83,7 +76,6 @@ steps:
set -e
node build/azure-pipelines/mixin
displayName: Mix in quality
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
@@ -92,7 +84,7 @@ steps:
yarn strict-vscode
yarn valid-layers-check
displayName: Run hygiene, eslint
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
@@ -102,7 +94,6 @@ steps:
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
displayName: Compile
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- script: |
set -e
@@ -110,7 +101,6 @@ steps:
AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
node build/azure-pipelines/upload-sourcemaps
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['CacheExists-Compilation'], 'true'))
- script: |
set -e
@@ -125,12 +115,13 @@ steps:
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Compiled Files
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
displayName: Compress compilation artifact
- task: PublishPipelineArtifact@1
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
artifactName: Compilation
displayName: Publish compilation artifact

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:

View File

@@ -1,28 +1,7 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
echo -n $ENABLE_TERRAPIN > .build/terrapin
displayName: Prepare compilation cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -34,6 +13,17 @@ steps:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz
displayName: Extract compilation output
- script: |
set -e
cat << EOF > ~/.netrc
@@ -48,30 +38,35 @@ steps:
- script: |
set -e
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git merge $(node -p "require('./package.json').distro")
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
mkdir -p .build
node build/azure-pipelines/common/computeNodeModulesCacheKey.js "web" $ENABLE_TERRAPIN > .build/yarnlockhash
displayName: Prepare yarn cache flags
- script: |
echo -n "web" > .build/arch
displayName: Prepare yarn cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: Cache@2
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- script: |
set -e
export CHILD_CONCURRENCY="1"
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
@@ -80,21 +75,19 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
@@ -130,3 +123,8 @@ steps:
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
./build/azure-pipelines/web/publish.sh
displayName: Publish
- publish: $(Agent.BuildDirectory)/vscode-web.tar.gz
artifact: vscode-web-standalone
displayName: Publish web archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))

View File

@@ -1,25 +1,5 @@
steps:
- script: |
mkdir -p .build
echo -n $BUILD_SOURCEVERSION > .build/commit
echo -n $VSCODE_QUALITY > .build/quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- script: |
set -e
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
@@ -35,6 +15,16 @@ steps:
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
displayName: Extract compilation output
- script: |
set -e
cat << EOF > ~/.netrc

View File

@@ -1,7 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<configuration>
<packageSources>
<clear/>
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<packageSources>
<clear />
<add key="ESRP" value="https://microsoft.pkgs.visualstudio.com/_packaging/ESRP/nuget/v3/index.json" />
</packageSources>
<disabledPackageSources>
<clear />
</disabledPackageSources>
</configuration>

View File

@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@3 # {{SQL CARBON EDIT}} update version
inputs:

View File

@@ -3,7 +3,7 @@ $ErrorActionPreference = "Stop"
$CertBytes = [System.Convert]::FromBase64String($CertBase64)
$CertCollection = New-Object System.Security.Cryptography.X509Certificates.X509Certificate2Collection
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable)
$CertCollection.Import($CertBytes, $null, [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::Exportable -bxor [System.Security.Cryptography.X509Certificates.X509KeyStorageFlags]::PersistKeySet)
$CertStore = New-Object System.Security.Cryptography.X509Certificates.X509Store("My","LocalMachine")
$CertStore.Open("ReadWrite")

View File

@@ -1,28 +1,7 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
displayName: Prepare compilation cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
inputs:
keyfile: "build/.cachesalt, .build/commit, .build/quality, .build/terrapin"
targetfolder: ".build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min"
vstsFeed: "npm-vscode"
platformIndependent: true
alias: "Compilation"
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.14.1"
versionSpec: "12.18.3"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -39,6 +18,18 @@ steps:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
path: $(Build.ArtifactStagingDirectory)
displayName: Download compilation output
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { tar --force-local -xzf $(Build.ArtifactStagingDirectory)/compilation.tar.gz }
displayName: Extract compilation output
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
@@ -51,26 +42,34 @@ steps:
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git" }
exec { git fetch distro }
exec { git merge $(node -p "require('./package.json').distro") }
exec { git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro") }
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- powershell: |
"$(VSCODE_ARCH)" | Out-File -Encoding ascii -NoNewLine .build\arch
"$env:ENABLE_TERRAPIN" | Out-File -Encoding ascii -NoNewLine .build\terrapin
node build/azure-pipelines/common/computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
- task: Cache@2
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
key: 'nodeModules | $(Agent.OS) | .build/arch, .build/terrapin, .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { 7z.exe x .build/node_modules_cache/cache.7z -aos }
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@@ -79,22 +78,20 @@ steps:
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:CHILD_CONCURRENCY="1"
retry { exec { yarn --frozen-lockfile } }
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt }
exec { mkdir -Force .build/node_modules_cache }
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@@ -107,11 +104,18 @@ steps:
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-min-ci" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
echo "##vso[task.setvariable variable=CodeSigningFolderPath]$(agent.builddirectory)/VSCode-win32-$(VSCODE_ARCH)"
displayName: Build
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-code-helper" }
exec { yarn gulp "vscode-win32-$(VSCODE_ARCH)-inno-updater" }
displayName: Prepare Package
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
@@ -122,12 +126,21 @@ steps:
displayName: Build Server
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
exec { yarn npm-run-all -lp "electron $(VSCODE_ARCH)" "playwright-install" }
displayName: Download Electron and Playwright
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn electron $(VSCODE_ARCH) }
exec { .\scripts\test.bat --build --tfs "Unit Tests" }
displayName: Run unit tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@@ -135,6 +148,14 @@ steps:
$ErrorActionPreference = "Stop"
exec { yarn test-browser --build --browser chromium --browser firefox --tfs "Browser Unit Tests" }
displayName: Run unit tests (Browser)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn --cwd test/integration/browser compile }
displayName: Compile integration tests
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@@ -148,6 +169,7 @@ steps:
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\scripts\test-integration.bat --build --tfs "Integration Tests" }
displayName: Run integration tests (Electron)
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@@ -155,6 +177,7 @@ steps:
$ErrorActionPreference = "Stop"
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@@ -165,6 +188,7 @@ steps:
$AppNameShort = $AppProductJson.nameShort
exec { $env:INTEGRATION_TEST_ELECTRON_PATH = "$AppRoot\$AppNameShort.exe"; $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-remote-integration.bat }
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: PublishPipelineArtifact@0
@@ -180,7 +204,7 @@ steps:
inputs:
testResultsFiles: "*-results.xml"
searchFolder: "$(Build.ArtifactStagingDirectory)/test-results"
condition: and(succeededOrFailed(), ne(variables['VSCODE_ARCH'], 'arm64'))
condition: and(succeededOrFailed(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
@@ -236,6 +260,7 @@ steps:
}
]
SessionTimeout: 120
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: NuGetCommand@2
displayName: Install ESRPClient.exe
@@ -245,11 +270,13 @@ steps:
nugetConfigPath: 'build\azure-pipelines\win32\ESRPClient\NuGet.config'
externalFeedCredentials: "ESRP Nuget"
restoreDirectory: packages
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ESRPImportCertTask@1
displayName: Import ESRP Request Signing Certificate
inputs:
ESRP: "ESRP CodeSign"
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: PowerShell@2
inputs:
@@ -257,6 +284,7 @@ steps:
filePath: .\build\azure-pipelines\win32\import-esrp-auth-cert.ps1
arguments: "$(ESRP-SSL-AADAuth)"
displayName: Import ESRP Auth Certificate
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@@ -266,6 +294,32 @@ steps:
$env:VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)"
.\build\azure-pipelines\win32\publish.ps1
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\archive\VSCode-win32-$(VSCODE_ARCH).zip
artifact: vscode-win32-$(VSCODE_ARCH)
displayName: Publish archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\system-setup\VSCodeSetup.exe
artifact: vscode-win32-$(VSCODE_ARCH)-setup
displayName: Publish system setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\win32-$(VSCODE_ARCH)\user-setup\VSCodeSetup.exe
artifact: vscode-win32-$(VSCODE_ARCH)-user-setup
displayName: Publish user setup
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH).zip
artifact: vscode-server-win32-$(VSCODE_ARCH)
displayName: Publish server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- publish: $(System.DefaultWorkingDirectory)\.build\vscode-server-win32-$(VSCODE_ARCH)-web.zip
artifact: vscode-server-win32-$(VSCODE_ARCH)-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"


@@ -1,25 +1,4 @@
steps:
- powershell: |
mkdir .build -ea 0
"$env:BUILD_SOURCEVERSION" | Out-File -Encoding ascii -NoNewLine .build\commit
"$env:VSCODE_QUALITY" | Out-File -Encoding ascii -NoNewLine .build\quality
displayName: Prepare cache flag
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Compiled Files
inputs:
keyfile: 'build/.cachesalt, .build/commit, .build/quality'
targetfolder: '.build, out-build, out-vscode-min, out-vscode-reh-min, out-vscode-reh-web-min'
vstsFeed: 'BuildCache'
platformIndependent: true
alias: 'Compilation'
- powershell: |
$ErrorActionPreference = "Stop"
exit 1
displayName: Check RestoreCache
condition: and(succeeded(), ne(variables['CacheRestored-Compilation'], 'true'))
- task: NodeTool@0
inputs:
versionSpec: "12.13.0"
@@ -34,11 +13,22 @@ steps:
addToPath: true
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
displayName: 'Azure Key Vault: Get Secrets'
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
displayName: Download compilation output
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { tar --force-local -xzf $(Pipeline.Workspace)/compilation.tar.gz }
displayName: Extract compilation output
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@@ -55,37 +45,51 @@ steps:
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
displayName: Restore Cache - Node Modules
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash }
displayName: Prepare yarn cache key
- task: Cache@2
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore Cache - Node Modules
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { 7z.exe x .build/node_modules_cache/cache.7z -aos }
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules archive
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
$env:CHILD_CONCURRENCY="1"
exec { yarn --frozen-lockfile }
displayName: Install dependencies
env:
GITHUB_TOKEN: $(github-distro-mixin-password)
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Install dependencies
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
displayName: Save Cache - Node Modules
inputs:
keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock, !samples/**/yarn.lock'
targetfolder: '**/node_modules, !**/node_modules/**/node_modules, !samples/**/node_modules'
vstsFeed: 'BuildCache'
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt }
exec { mkdir -Force .build/node_modules_cache }
exec { 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt }
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { yarn postinstall }
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
- powershell: |
. build/azure-pipelines/win32/exec.ps1
@@ -151,7 +155,7 @@ steps:
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(agent.builddirectory)/azuredatastudio-win32-x64'
Pattern: '*.exe,*.node,resources/app/node_modules.asar.unpacked/*.dll,swiftshader/*.dll,d3dcompiler_47.dll,libGLESv2.dll,ffmpeg.dll,libEGL.dll,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,Microsoft.SqlServer.*.dll,Microsoft.Data.Tools.Sql.BatchParser.dll'
Pattern: '*.exe,*.node,resources/app/node_modules.asar.unpacked/*.dll,swiftshader/*.dll,d3dcompiler_47.dll,vulkan-1.dll,libGLESv2.dll,ffmpeg.dll,libEGL.dll,Microsoft.SqlTools.Hosting.dll,Microsoft.SqlTools.ResourceProvider.Core.dll,Microsoft.SqlTools.ResourceProvider.DefaultImpl.dll,MicrosoftSqlToolsCredentials.dll,MicrosoftSqlToolsServiceLayer.dll,Newtonsoft.Json.dll,SqlSerializationService.dll,SqlToolsResourceProviderService.dll,Microsoft.SqlServer.*.dll,Microsoft.Data.Tools.Sql.BatchParser.dll'
signConfigType: inlineSignParams
inlineOperation: |
[


@@ -6,8 +6,7 @@
const fs = require('fs');
const path = require('path');
const os = require('os');
const { remote } = require('electron');
const dialog = remote.dialog;
const { ipcRenderer } = require('electron');
const builtInExtensionsPath = path.join(__dirname, '..', '..', 'product.json');
const controlFilePath = path.join(os.homedir(), '.vscode-oss-dev', 'extensions', 'control.json');
@@ -84,17 +83,13 @@ function render(el, state) {
}
const localInput = renderOption(form, `local-${ext.name}`, 'Local', 'local', !!local);
localInput.onchange = function () {
const result = dialog.showOpenDialog(remote.getCurrentWindow(), {
title: 'Choose Folder',
properties: ['openDirectory']
});
localInput.onchange = async function () {
const result = await ipcRenderer.invoke('pickdir');
if (result && result.length >= 1) {
control[ext.name] = result[0];
if (result) {
control[ext.name] = result;
setState({ builtin, control });
}
setState({ builtin, control });
};
if (local) {


@@ -3,12 +3,25 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const { app, BrowserWindow } = require('electron');
const { app, BrowserWindow, ipcMain, dialog } = require('electron');
const url = require('url');
const path = require('path');
let window = null;
ipcMain.handle('pickdir', async () => {
const result = await dialog.showOpenDialog(window, {
title: 'Choose Folder',
properties: ['openDirectory']
});
if (result.canceled || result.filePaths.length < 1) {
return undefined;
}
return result.filePaths[0];
});
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false, nativeWindowOpen: true } });
window.setMenuBarVisibility(false);
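The two preceding diffs replace the deprecated remote.dialog call in the built-in-extensions page with an ipcRenderer.invoke / ipcMain.handle round trip. A minimal sketch of that pattern, reduced to what the diff shows (the 'pickdir' channel and undefined-on-cancel return value follow the diff; the rest is illustrative):

// main process: register a handler that shows the folder picker and
// returns a single path, or undefined when the user cancels.
const { ipcMain, dialog } = require('electron');
ipcMain.handle('pickdir', async () => {
	const result = await dialog.showOpenDialog({ properties: ['openDirectory'] });
	return (result.canceled || result.filePaths.length < 1) ? undefined : result.filePaths[0];
});

// renderer process (nodeIntegration enabled, as in the diff): await the
// reply instead of calling remote.dialog synchronously.
const { ipcRenderer } = require('electron');
async function chooseFolder() {
	const dir = await ipcRenderer.invoke('pickdir');
	if (dir) {
		console.log('picked', dir);
	}
}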


@@ -0,0 +1,58 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const vscode_universal_1 = require("vscode-universal");
const fs = require("fs-extra");
const path = require("path");
const plist = require("plist");
const product = require("../../product.json");
async function main() {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
const arch = process.env['VSCODE_ARCH'];
if (!buildDir) {
throw new Error('$AGENT_BUILDDIRECTORY not set');
}
const appName = product.nameLong + '.app';
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await vscode_universal_1.makeUniversalApp({
x64AppPath,
arm64AppPath,
x64AsarPath,
arm64AsarPath,
filesToSkip: [
'product.json',
'Credits.rtf',
'CodeResources',
'fsevents.node',
'.npmrc'
],
outAppPath,
force: true
});
let productJson = await fs.readJson(productJsonPath);
Object.assign(productJson, {
darwinUniversalAssetId: 'darwin-universal'
});
await fs.writeJson(productJsonPath, productJson);
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
let infoPlistJson = plist.parse(infoPlistString);
Object.assign(infoPlistJson, {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}


@@ -0,0 +1,66 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import { makeUniversalApp } from 'vscode-universal';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as plist from 'plist';
import * as product from '../../product.json';
async function main() {
const buildDir = process.env['AGENT_BUILDDIRECTORY'];
const arch = process.env['VSCODE_ARCH'];
if (!buildDir) {
throw new Error('$AGENT_BUILDDIRECTORY not set');
}
const appName = product.nameLong + '.app';
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await makeUniversalApp({
x64AppPath,
arm64AppPath,
x64AsarPath,
arm64AsarPath,
filesToSkip: [
'product.json',
'Credits.rtf',
'CodeResources',
'fsevents.node',
'.npmrc'
],
outAppPath,
force: true
});
let productJson = await fs.readJson(productJsonPath);
Object.assign(productJson, {
darwinUniversalAssetId: 'darwin-universal'
});
await fs.writeJson(productJsonPath, productJson);
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
let infoPlistJson = plist.parse(infoPlistString);
Object.assign(infoPlistJson, {
LSRequiresNativeExecution: true
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
}
if (require.main === module) {
main().catch(err => {
console.error(err);
process.exit(1);
});
}


@@ -24,7 +24,6 @@ async function main() {
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
const helperAppBaseName = product.nameShort;
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app';
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
const defaultOpts = {
app: path.join(appRoot, appName),
@@ -43,14 +42,11 @@ async function main() {
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
ignore: (filePath) => {
return filePath.includes(gpuHelperAppName) ||
filePath.includes(pluginHelperAppName) ||
filePath.includes(rendererHelperAppName);
} });
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
const pluginHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, pluginHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist') });
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
await codesign.signAsync(gpuHelperOpts);
await codesign.signAsync(pluginHelperOpts);
await codesign.signAsync(rendererHelperOpts);
await codesign.signAsync(appOpts);
}


@@ -29,7 +29,6 @@ async function main(): Promise<void> {
const appFrameworkPath = path.join(appRoot, appName, 'Contents', 'Frameworks');
const helperAppBaseName = product.nameShort;
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
const pluginHelperAppName = helperAppBaseName + ' Helper (Plugin).app';
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
const defaultOpts: codesign.SignOptions = {
@@ -51,7 +50,6 @@ async function main(): Promise<void> {
// TODO(deepak1556): Incorrectly declared type in electron-osx-sign
ignore: (filePath: string) => {
return filePath.includes(gpuHelperAppName) ||
filePath.includes(pluginHelperAppName) ||
filePath.includes(rendererHelperAppName);
}
};
@@ -63,13 +61,6 @@ async function main(): Promise<void> {
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'),
};
const pluginHelperOpts: codesign.SignOptions = {
...defaultOpts,
app: path.join(appFrameworkPath, pluginHelperAppName),
entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-plugin-entitlements.plist'),
};
const rendererHelperOpts: codesign.SignOptions = {
...defaultOpts,
app: path.join(appFrameworkPath, rendererHelperAppName),
@@ -78,7 +69,6 @@ async function main(): Promise<void> {
};
await codesign.signAsync(gpuHelperOpts);
await codesign.signAsync(pluginHelperOpts);
await codesign.signAsync(rendererHelperOpts);
await codesign.signAsync(appOpts as any);
}

build/eslint.js (new file, 36 lines)

@@ -0,0 +1,36 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
const es = require('event-stream');
const vfs = require('vinyl-fs');
const { jsHygieneFilter, tsHygieneFilter } = require('./filters');
function eslint() {
const gulpeslint = require('gulp-eslint');
return vfs
.src([...jsHygieneFilter, ...tsHygieneFilter], { base: '.', follow: true, allowEmpty: true })
.pipe(
gulpeslint({
configFile: '.eslintrc.json',
rulePaths: ['./build/lib/eslint'],
})
)
.pipe(gulpeslint.formatEach('compact'))
.pipe(
gulpeslint.results((results) => {
if (results.warningCount > 0 || results.errorCount > 0) {
throw new Error('eslint failed with warnings and/or errors');
}
})
).pipe(es.through(function () { /* noop, important for the stream to end */ }));
}
if (require.main === module) {
eslint().on('error', (err) => {
console.error();
console.error(err);
process.exit(1);
});
}

build/filters.js (new file, 152 lines)

@@ -0,0 +1,152 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
/**
* Hygiene works by creating cascading subsets of all our files and
* passing them through a sequence of checks. Here are the current subsets,
* named according to the checks performed on them. Each subset contains
* the following one, as described in mathematical notation:
*
* all ⊃ eol ⊇ indentation ⊃ copyright ⊃ typescript
*/
module.exports.all = [
'*',
'build/**/*',
'extensions/**/*',
'scripts/**/*',
'src/**/*',
'test/**/*',
'!out*/**',
'!test/**/out/**',
'!**/node_modules/**',
];
module.exports.indentationFilter = [
'**',
// except specific files
'!**/ThirdPartyNotices.txt',
'!**/LICENSE.{txt,rtf}',
'!LICENSES.chromium.html',
'!**/LICENSE',
'!src/vs/nls.js',
'!src/vs/nls.build.js',
'!src/vs/css.js',
'!src/vs/css.build.js',
'!src/vs/loader.js',
'!src/vs/base/common/insane/insane.js',
'!src/vs/base/common/marked/marked.js',
'!src/vs/base/common/semver/semver.js',
'!src/vs/base/node/terminateProcess.sh',
'!src/vs/base/node/cpuUsage.sh',
'!test/unit/assert.js',
'!resources/linux/snap/electron-launch',
'!build/ext.js',
// except specific folders
'!test/automation/out/**',
'!test/monaco/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!build/monaco/**',
'!build/win32/**',
// except multiple specific files
'!**/package.json',
'!**/yarn.lock',
'!**/yarn-error.log',
// except multiple specific folders
'!**/codicon/**',
'!**/fixtures/**',
'!**/lib/**',
'!extensions/**/dist/**',
'!extensions/**/out/**',
'!extensions/**/snippets/**',
'!extensions/**/syntaxes/**',
'!extensions/**/themes/**',
'!extensions/**/colorize-fixtures/**',
// except specific file types
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/*.{svg,exe,png,bmp,jpg,scpt,bat,cmd,cur,ttf,woff,eot,md,ps1,template,yaml,yml,d.ts.recipe,ico,icns,plist}',
'!build/{lib,download,darwin}/**/*.js',
'!build/**/*.sh',
'!build/azure-pipelines/**/*.js',
'!build/azure-pipelines/**/*.config',
'!**/Dockerfile',
'!**/Dockerfile.*',
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
'!extensions/simple-browser/media/*.js',
];
module.exports.copyrightFilter = [
'**',
'!**/*.desktop',
'!**/*.json',
'!**/*.html',
'!**/*.template',
'!**/*.md',
'!**/*.bat',
'!**/*.cmd',
'!**/*.ico',
'!**/*.icns',
'!**/*.xml',
'!**/*.sh',
'!**/*.txt',
'!**/*.xpm',
'!**/*.opts',
'!**/*.disabled',
'!**/*.code-workspace',
'!**/*.js.map',
'!build/**/*.init',
'!resources/linux/snap/snapcraft.yaml',
'!resources/win32/bin/code.js',
'!resources/web/code-web.js',
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',
];
module.exports.jsHygieneFilter = [
'src/**/*.js',
'build/gulpfile.*.js',
'!src/vs/loader.js',
'!src/vs/css.js',
'!src/vs/nls.js',
'!src/vs/css.build.js',
'!src/vs/nls.build.js',
'!src/**/insane.js',
'!src/**/marked.js',
'!src/**/semver.js',
'!**/test/**',
];
module.exports.tsHygieneFilter = [
'src/**/*.ts',
'test/**/*.ts',
'extensions/**/*.ts',
'!src/vs/*/**/*.d.ts',
'!src/typings/**/*.d.ts',
'!extensions/**/*.d.ts',
'!**/fixtures/**',
'!**/typings/**',
'!**/node_modules/**',
'!extensions/**/colorize-fixtures/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/**/*.test.ts',
'!extensions/html-language-features/server/lib/jquery.d.ts',
];


@@ -11,6 +11,11 @@ const task = require('./lib/task');
const compilation = require('./lib/compilation');
// Full compile, including nls and inline sources in sourcemaps, for build
const compileBuildTask = task.define('compile-build', task.series(util.rimraf('out-build'), compilation.compileTask('src', 'out-build', true)));
const compileBuildTask = task.define('compile-build',
task.series(
util.rimraf('out-build'),
compilation.compileTask('src', 'out-build', true)
)
);
gulp.task(compileBuildTask);
exports.compileBuildTask = compileBuildTask;


@@ -16,8 +16,6 @@ const cp = require('child_process');
const compilation = require('./lib/compilation');
const monacoapi = require('./monaco/api');
const fs = require('fs');
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
let root = path.dirname(__dirname);
let sha1 = util.getVersion(root);
@@ -369,6 +367,9 @@ gulp.task('editor-distro',
);
const bundleEditorESMTask = task.define('editor-esm-bundle-webpack', () => {
const webpack = require('webpack');
const webpackGulp = require('webpack-stream');
const result = es.through();
const webpackConfigPath = path.join(root, 'build/monaco/monaco.webpack.config.js');


@@ -9,17 +9,13 @@ require('events').EventEmitter.defaultMaxListeners = 100;
const gulp = require('gulp');
const path = require('path');
const nodeUtil = require('util');
const tsb = require('gulp-tsb');
const es = require('event-stream');
const filter = require('gulp-filter');
const webpack = require('webpack');
const util = require('./lib/util');
const task = require('./lib/task');
const watcher = require('./lib/watch');
const createReporter = require('./lib/reporter').createReporter;
const glob = require('glob');
const sourcemaps = require('gulp-sourcemaps');
const nlsDev = require('vscode-nls-dev');
const root = path.dirname(__dirname);
const commit = util.getVersion(root);
const plumber = require('gulp-plumber');
@@ -70,6 +66,10 @@ const tasks = compilations.map(function (tsconfigFile) {
}
function createPipeline(build, emitError) {
const nlsDev = require('vscode-nls-dev');
const tsb = require('gulp-tsb');
const sourcemaps = require('gulp-sourcemaps');
const reporter = createReporter('extensions');
overrideOptions.inlineSources = Boolean(build);
@@ -176,6 +176,8 @@ const compileExtensionsBuildTask = task.define('compile-extensions-build', task.
));
gulp.task(compileExtensionsBuildTask);
gulp.task(task.define('extensions-ci', task.series(compileExtensionsBuildTask)));
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
@@ -187,6 +189,7 @@ gulp.task(watchWebExtensionsTask);
exports.watchWebExtensionsTask = watchWebExtensionsTask;
async function buildWebExtensions(isWatch) {
const webpack = require('webpack');
const webpackConfigLocations = await nodeUtil.promisify(glob)(
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),

build/gulpfile.js (new file, 41 lines)

@@ -0,0 +1,41 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
// Increase max listeners for event emitters
require('events').EventEmitter.defaultMaxListeners = 100;
const gulp = require('gulp');
const util = require('./lib/util');
const task = require('./lib/task');
const compilation = require('./lib/compilation');
const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpfile.editor');
const { compileExtensionsTask, watchExtensionsTask } = require('./gulpfile.extensions');
// Fast compile for development time
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
gulp.task(compileClientTask);
const watchClientTask = task.define('watch-client', task.series(util.rimraf('out'), compilation.watchTask('out', false)));
gulp.task(watchClientTask);
// All
const compileTask = task.define('compile', task.parallel(monacoTypecheckTask, compileClientTask, compileExtensionsTask));
gulp.task(compileTask);
gulp.task(task.define('watch', task.parallel(/* monacoTypecheckWatchTask, */ watchClientTask, watchExtensionsTask)));
// Default
gulp.task('default', compileTask);
process.on('unhandledRejection', (reason, p) => {
console.log('Unhandled Rejection at: Promise', p, 'reason:', reason);
process.exit(1);
});
// Load all the gulpfiles only if running tasks other than the editor tasks
require('glob').sync('gulpfile.*.js', { cwd: __dirname })
.forEach(f => require(`./${f}`));


@@ -14,10 +14,8 @@ const task = require('./lib/task');
const vfs = require('vinyl-fs');
const flatmap = require('gulp-flatmap');
const gunzip = require('gulp-gunzip');
const untar = require('gulp-untar');
const File = require('vinyl');
const fs = require('fs');
const remote = require('gulp-remote-retry-src');
const rename = require('gulp-rename');
const filter = require('gulp-filter');
const cp = require('child_process');
@@ -78,13 +76,17 @@ BUILD_TARGETS.forEach(({ platform, arch }) => {
}));
});
const defaultNodeTask = gulp.task(`node-${process.platform}-${process.arch}`);
const arch = process.platform === 'darwin' ? 'x64' : process.arch;
const defaultNodeTask = gulp.task(`node-${process.platform}-${arch}`);
if (defaultNodeTask) {
gulp.task(task.define('node', defaultNodeTask));
}
function nodejs(platform, arch) {
const remote = require('gulp-remote-retry-src');
const untar = require('gulp-untar');
if (arch === 'ia32') {
arch = 'x86';
}
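A recurring pattern in these gulpfile diffs is deferring heavy, build-only requires (webpack, webpack-stream, gulp-untar, gulp-remote-retry-src, gulp-atom-electron, gulp-azure-storage, and similar) from module scope into the task body that actually uses them, so that merely loading a gulpfile stays cheap. A minimal sketch of the idea, with an illustrative task name:

const gulp = require('gulp');

gulp.task('bundle-example', async () => {
	// Deferred require: webpack is loaded only when this task runs,
	// not whenever any gulpfile in the tree is required.
	const webpack = require('webpack');
	console.log('webpack version:', webpack.version);
});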


@@ -11,13 +11,10 @@ const os = require('os');
const cp = require('child_process');
const path = require('path');
const es = require('event-stream');
const azure = require('gulp-azure-storage');
const electron = require('gulp-atom-electron');
const vfs = require('vinyl-fs');
const rename = require('gulp-rename');
const replace = require('gulp-replace');
const filter = require('gulp-filter');
const json = require('gulp-json-editor');
const _ = require('underscore');
const util = require('./lib/util');
const task = require('./lib/task');
@@ -29,14 +26,12 @@ const packageJson = require('../package.json');
const product = require('../product.json');
const crypto = require('crypto');
const i18n = require('./lib/i18n');
const deps = require('./dependencies');
const { getProductionDependencies } = require('./lib/dependencies');
const { config } = require('./lib/electron');
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const productionDependencies = deps.getProductionDependencies(path.dirname(__dirname));
// Build
const vscodeEntryPoints = _.flatten([
buildfile.entrypoint('vs/workbench/workbench.desktop.main'),
@@ -57,7 +52,7 @@ const vscodeResources = [
'out-build/bootstrap-node.js',
'out-build/bootstrap-window.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,html}',
'out-build/vs/**/*.{svg,png,html,jpg}',
'!out-build/vs/code/browser/**/*.html',
'!out-build/vs/editor/standalone/**/*.svg',
'out-build/vs/base/common/performance.js',
@@ -97,7 +92,6 @@ const vscodeResources = [
'out-build/sql/workbench/parts/notebook/media/**/*.svg',
'out-build/sql/setup.js', // {{SQL CARBON EDIT}} end
'out-build/vs/code/electron-sandbox/processExplorer/processExplorer.js',
'out-build/vs/code/electron-sandbox/proxy/auth.js',
'!**/test/**'
];
@@ -122,6 +116,16 @@ const minifyVSCodeTask = task.define('minify-vscode', task.series(
));
gulp.task(minifyVSCodeTask);
const core = task.define('core-ci', task.series(
gulp.task('compile-build'),
task.parallel(
gulp.task('minify-vscode'),
gulp.task('minify-vscode-reh'),
gulp.task('minify-vscode-reh-web'),
)
));
gulp.task(core);
/**
* Compute checksums for some files.
*
@@ -163,6 +167,9 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
platform = platform || process.platform;
return () => {
const electron = require('gulp-atom-electron');
const json = require('gulp-json-editor');
const out = sourceFolderName;
const checksums = computeChecksums(out, [
@@ -221,8 +228,9 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const telemetry = gulp.src('.build/telemetry/**', { base: '.build/telemetry', dot: true });
const jsFilter = util.filter(data => !data.isDirectory() &&/\.js$/.test(data.path));
const jsFilter = util.filter(data => !data.isDirectory() && /\.js$/.test(data.path));
const root = path.resolve(path.join(__dirname, '..'));
const productionDependencies = getProductionDependencies(root);
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
// {{SQL CARBON EDIT}} - fix runtime module load break
@@ -357,7 +365,7 @@ BUILD_TARGETS.forEach(buildTarget => {
const arch = buildTarget.arch;
const opts = buildTarget.opts;
['', 'min'].forEach(minified => {
const [vscode, vscodeMin] = ['', 'min'].map(minified => {
const sourceFolderName = `out-vscode${dashed(minified)}`;
const destinationFolderName = `azuredatastudio${dashed(platform)}${dashed(arch)}`;
@@ -374,7 +382,14 @@ BUILD_TARGETS.forEach(buildTarget => {
vscodeTaskCI
));
gulp.task(vscodeTask);
return vscodeTask;
});
if (process.platform === platform && process.arch === arch) {
gulp.task(task.define('vscode', task.series(vscode)));
gulp.task(task.define('vscode-min', task.series(vscodeMin)));
}
});
// Transifex Localizations
@@ -516,6 +531,8 @@ gulp.task(task.define(
task.series(
generateVSCodeConfigurationTask,
() => {
const azure = require('gulp-azure-storage');
if (!shouldSetupSettingsSearch()) {
const branch = process.env.BUILD_SOURCEBRANCH;
console.log(`Only runs on master and release branches, not ${branch}`);


@@ -96,9 +96,6 @@ function prepareDebPackage(arch) {
const postinst = gulp.src('resources/linux/debian/postinst.template', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@ARCHITECTURE@@', debArch))
.pipe(replace('@@QUALITY@@', product.quality || '@@QUALITY@@'))
.pipe(replace('@@UPDATEURL@@', product.updateUrl || '@@UPDATEURL@@'))
.pipe(rename('DEBIAN/postinst'));
const all = es.merge(control, postinst, postrm, prerm, desktops, appdata, workspaceMime, icon, bash_completion, zsh_completion, code);
@@ -240,7 +237,8 @@ function prepareSnapPackage(arch) {
const snapcraft = gulp.src('resources/linux/snap/snapcraft.yaml', { base: '.' })
.pipe(replace('@@NAME@@', product.applicationName))
.pipe(replace('@@VERSION@@', commit.substr(0, 8)))
.pipe(replace('@@ARCHITECTURE@@', arch))
// Possible run-on values https://snapcraft.io/docs/architectures
.pipe(replace('@@ARCHITECTURE@@', arch === 'x64' ? 'amd64' : arch))
.pipe(rename('snap/snapcraft.yaml'));
const electronLaunch = gulp.src('resources/linux/snap/electron-launch', { base: '.' })

Some files were not shown because too many files have changed in this diff.