Compare commits

...

178 Commits

Author SHA1 Message Date
Charles Gagnon
eaf3482a64 Update azdata.d.ts (#16398)
(cherry picked from commit 88d28b7d51)
2021-07-22 13:52:56 -07:00
Alex Ma
a430fd2592 Port for locfunc task fix with XLFs and langpack source files. (#16195)
* added on end function to end of pipe

* Update for existing localized XLFs. (#16107)

* update to existing german extensions

* Update for existing Spanish Resources

* Added updates to existing French Resources

* update for Italian Resources

* Update for existing Japanese resources

* update for Korean resources

* update for Portuguese resources

* russian resources updated

* update for chinese resources

* Removed async copy in rename function

* added comments and removed async

* added changes to langpacks
2021-07-19 13:50:52 -07:00
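A minimal TypeScript sketch of the stream-completion pattern mentioned in the entry above ("added on end function to end of pipe"); the globs and task name are assumed for illustration and this is not the actual gulpfile code.

```typescript
import * as gulp from 'gulp';

// Signal task completion only after the pipe has finished writing the XLF /
// langpack files, by listening for the stream's 'end' event instead of
// returning before the copy has flushed.
function copyXlfs(done: (err?: Error) => void): void {
	gulp.src('i18n/**/*.xlf')        // assumed source glob
		.pipe(gulp.dest('out/xlf'))  // assumed destination
		.on('error', done)           // propagate stream errors to gulp
		.on('end', () => done());    // mark the task finished once the pipe ends
}

gulp.task('copy-xlfs', copyXlfs);
```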
Barbara Valdez
9577b70e37 Revert to previous markdown renderer (#16246) (#16249)
* Add marked files under sql/base/common
2021-07-19 13:46:13 -07:00
Vasu Bhog
6555fff09f Fix Windows WYSIWYG linking issue (switching to splitview and resolving links in WYSIWYG) (#16133) (#16234)
* fix relative links not correctly formatted due to marked js

* logic in one place
2021-07-16 13:52:37 -07:00
Aasim Khan
7c1d98ad06 Updating import extension (#16201) 2021-07-16 10:19:43 -07:00
Alan Ren
7aa00b80b0 make button not focusable when disabled (#16185) (#16205)
* make button not focusable when disabled

* update comment

* add carbon edit tag
2021-07-16 10:17:15 -07:00
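The accessibility pattern from the entry above, sketched against the plain DOM; a simplified illustration rather than the actual ADS button component.

```typescript
// When a button is disabled, take it out of the tab order and expose the state
// to assistive technology; restore both when it is re-enabled.
function setButtonEnabled(button: HTMLElement, enabled: boolean): void {
	button.tabIndex = enabled ? 0 : -1;                    // not focusable while disabled
	button.setAttribute('aria-disabled', String(!enabled));
	button.classList.toggle('disabled', !enabled);         // assumed CSS hook
}
```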
Charles Gagnon
3d65519595 Remove second prompt for azdata install (#16182) (#16188)
(cherry picked from commit 2a74ad4190)
2021-07-15 13:33:55 -07:00
Charles Gagnon
2308b2cb3d Fix component unregistering from parent (#16166) (#16183)
(cherry picked from commit 1e81b6f054)
2021-07-15 13:22:47 -07:00
Udeesha Gautam
4d86a62c24 Fixing the error order for Project dialog validation (#16141) (#16186)
* Fixing the error order for Project dialog

* Taking PR review comments and making the change for open project also
2021-07-15 13:13:30 -07:00
Justin M
3f51fa42fd Added missing properties in kustoTreeDataProvider and azuremonitorTreeDataProvider (#16153) (#16155) 2021-07-15 11:26:41 -07:00
Lucy Zhang
97881ce62a don't show python upgrade prompt multiple times (#16080) (#16174) 2021-07-15 09:09:25 -07:00
Aditya Bist
c1fede2c75 Secondary actions (#16122) (#16132)
* data menu shows up

* clean up code

* remove dead code

* string literal

* add menu item instead

* remove unused code

(cherry picked from commit 5524a3659c)
2021-07-14 13:28:22 -07:00
Charles Gagnon
7c34261fd2 Add openQueryDocument API (#16117) (#16120)
* Add openQueryDocument API

* Remove open call

* Change try name

(cherry picked from commit a0f46fec65)
2021-07-13 21:57:30 -07:00
Barbara Valdez
aa05f77ef2 Set notebook path before moving. (#16055) (#16111)
* set the original path when moving a single notebook

* save original file path before attempting to move in case of error
2021-07-13 20:46:06 -07:00
Kim Santiago
d737ed796c bump ADS dependency for sql database projects (#16092) (#16106) 2021-07-13 17:00:52 -07:00
Alan Ren
8d3187c511 left align the account information (#16086) (#16090) 2021-07-13 08:06:31 -07:00
Karl Burtram
6abc5f2287 Remove duplicate Getting Started menu (#16077) (#16082) 2021-07-12 15:14:36 -07:00
Charles Gagnon
ac4f53ed0a Fix opening new untitled query (#16078) (#16083)
(cherry picked from commit 9fc2cff654)
2021-07-12 14:54:43 -07:00
csigs
862c91d7d3 LEGO: check in for main to temporary branch. (#16003)
Co-authored-by: Alex Ma <alma1@microsoft.com>
2021-07-10 12:11:49 -07:00
csigs
313d5b026e LEGO: check in for main to temporary branch. (#16000)
Co-authored-by: Alex Ma <alma1@microsoft.com>
2021-07-10 12:10:14 -07:00
csigs
263f72523f LEGO: check in for main to temporary branch. (#15996)
Co-authored-by: Alex Ma <alma1@microsoft.com>
2021-07-10 11:00:21 -07:00
csigs
fd55651637 LEGO: check in for main to temporary branch. (#15995)
Co-authored-by: Alex Ma <alma1@microsoft.com>
2021-07-10 10:52:55 -07:00
csigs
67a0585dc0 LEGO: check in for main to temporary branch. (#15994)
Co-authored-by: Alex Ma <alma1@microsoft.com>
2021-07-10 09:55:08 -07:00
csigs
29efd0a0e2 LEGO: check in for main to temporary branch. (#15993) 2021-07-10 08:53:17 -07:00
Alan Ren
f1bd5e09ce new taskbar separator style (#16059) 2021-07-09 17:12:11 -07:00
Lucy Zhang
6c5d35eaae Check kernel dependencies when a new session is started (#16040)
* check dependencies when a new session is started

* fix test

* fix issue when kernel spec is not found
2021-07-09 12:51:21 -07:00
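A hedged sketch of the flow described in the entry above: on new-session start, verify the kernel spec exists and its dependencies are installed before proceeding. The helper interface is hypothetical, not the notebook extension's real API.

```typescript
interface KernelChecks {
	findKernelSpec(name: string): Promise<object | undefined>;
	dependenciesInstalled(spec: object): Promise<boolean>;
	promptToInstall(what: string): Promise<void>;
}

// Check kernel dependencies when a new session is started, prompting for
// installation instead of failing later with a missing-kernel error.
async function onSessionStarted(kernelName: string, checks: KernelChecks): Promise<void> {
	const spec = await checks.findKernelSpec(kernelName);
	if (!spec) {
		await checks.promptToInstall(`kernel '${kernelName}'`);
		return;
	}
	if (!(await checks.dependenciesInstalled(spec))) {
		await checks.promptToInstall(`dependencies for kernel '${kernelName}'`);
	}
}
```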
Justin M
e206eb81a3 Renamed extension.ts to main.ts for AzureMonitor (#16052) 2021-07-09 12:03:39 -07:00
Charles Gagnon
679e3d1393 Fix new file command titles (#16047) 2021-07-09 11:57:23 -07:00
Charles Gagnon
0045193262 Fix BDC auth (#16053) 2021-07-09 11:56:54 -07:00
Vasu Bhog
27823e9900 Fix CalloutDialog not opening after bad link (#16051)
* fix null parentNode
2021-07-09 09:01:48 -07:00
Charles Gagnon
8f202d91b6 editorReplacer -> editorOverride (#16041)
* editorReplacer -> editorOverride

* Update lifecycle phase

* add back input factories

* Add comment

* add back tests

* comments

* fix log

* comments
2021-07-09 08:46:50 -07:00
Aasim Khan
7ba0e49673 Migration summary page refresh (#16046)
* Fixing the spacings in summary page

* Adding target database dialog to summary page
2021-07-08 16:15:00 -07:00
Kim Santiago
c69c303a2b fix duplicate sqlcmd var header when loading publish profile multiple times (#16007)
* fix duplicate sqlcmd var header when loading publish profile multiple times

* remove variable to keep track of count
2021-07-08 14:37:07 -07:00
Barbara Valdez
74cfac39d2 Fix book toc manager tests (#16034)
* fix book toc manager tests

* add js-yaml to notebooks package.json
2021-07-08 11:51:08 -07:00
Charles Gagnon
922348b846 Add basic structure for publish project quickpick (#16008)
* Add basic structure for publish project quickpick

* fixes

* feedback
2021-07-07 22:00:44 -07:00
Alan Ren
1a32db1343 set the width of the 'clear list' button (#16036) 2021-07-07 18:45:59 -07:00
Aasim Khan
37823db9c9 blocking sql migration dashboard on a managed instance (#16033) 2021-07-07 15:05:46 -07:00
Alan Ren
c5893a488f toolbar separator overlapping with other items (#16012)
* separator overlapping with other items

* Update notebook.css
2021-07-07 14:44:41 -07:00
Charles Gagnon
3f0ea580f5 Update some key az CLI references (#16032) 2021-07-07 14:43:30 -07:00
Barbara Valdez
9a8c9041df add create book entry point to the bookTreeView (#15902) 2021-07-07 14:17:30 -07:00
Lucy Zhang
f8c4d332bb add height css (#16026) 2021-07-07 13:49:33 -07:00
Alan Ren
64cee64516 make the toolbar items keyboard focusable (#16017)
* make the toolbar items keyboard focusable

* add comment
2021-07-07 13:45:07 -07:00
Charles Gagnon
d942799f9d Add azcli extension (#16029) 2021-07-07 13:00:12 -07:00
nasc17
6078e9f459 Make it so user cannot create or scale PG server group to 1 worker node (#16005)
* Added not equals validation

* Added validation to worker node in deploy

* Fixed info

* Change string

* Change string

* Change string
2021-07-07 12:25:07 -07:00
Vasu Bhog
14ce88023e Add allow root setting for Ubuntu and Web Mode users (#16027)
* add allow root setting for Ubuntu and Web Mode users
2021-07-07 11:02:12 -07:00
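An illustrative guess at how such a setting could be applied, assuming it ultimately toggles Jupyter's --allow-root flag when the server is launched; the function and setting wiring are not the extension's actual code.

```typescript
// Append --allow-root to the Jupyter server arguments only when the user has
// opted in (e.g. root installs on Ubuntu or web mode).
function buildJupyterArgs(baseArgs: string[], allowRoot: boolean): string[] {
	return allowRoot ? [...baseArgs, '--allow-root'] : baseArgs;
}

// buildJupyterArgs(['notebook', '--no-browser'], true)
//   -> ['notebook', '--no-browser', '--allow-root']
```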
Rachel Kim
073f9e052b added conditional class to declarativetable for checkbox cells (#16013) 2021-07-06 19:41:53 -07:00
Lucy Zhang
9ba4057a3c fix notebook icon highlights (#16009) 2021-07-06 17:50:54 -07:00
Aasim Khan
7c68506975 Fixing next button on flat file wizard file configuration page (#15988)
* WIP

* Removing unnecessary validations from import extension

* readding abstract method implementation

* Moving setup navigation validator to base class
2021-07-06 16:53:44 -07:00
Justin M
48b7d96999 Add Azure Monitor Extension (#15397)
* Added Azure Log Analytics resource for generating AAD Token.

* Fixed AzureResource

* Removed debug code from connectionManagementService

* Moved AzureLogAnalytics from AzureResource enum in azdata.d.ts to azdata.proposed.d.ts.  Added azureLogAnalyticsResource to all azureSettings in providerSettings.ts

* Updated endpoint for generating AAD Token for LogAnalytics for UsGov, UsNat, and China

* Initial Commit of Azure Monitor Extension

* Added extension name to azuremonitor package strings

* Removed azureMonitor resource from germanyCloud in providerSettings

* Added logic to exclude menuItems in object explorer for LogAnalytics

* Changed exe from AzureMonitor to Kusto

* Added if clause for queryName for new queries

* Changed queryWindow name from KustoQuery to KQLQuery for Kusto and LogAnalytics.

* Added LogAnalytics for setTaskBarContent

* Added serialization and telemetry feature classes to AzureMonitor. Added references for azdata and vscode.

* Added azure monitor light and dark icons

* Added config for Dashboard in package.json

* Added workspace information to dashboard

* Added language support for LogAnalytics

* Added Notebook support

* Added Hide flag to package.json for databaseName

* Changed providerId from LogAnalytics to LOGANALYTICS

* Changed Workspace to Workspace ID in package.nls.json

* Added support for Azure Widget browser

* Changed fullName to use workspaceId when connecting

* Changed providerId from alertsManagement to azureMonitor

* Added .gitignore and *.vsix to vscodeignore.

* Removed unused devDependencies

* Code Review Feedback

* Changed tsconfig.json to match Kusto and Sql

* Changed package.json to match kusto package.

* Changed tsconfig to validate unused params and implicitAny. Changed existing code to satisfy build.

* Fixed tsconfig to use the correct base class.

* Added objectExplorerNodeProvider and all related classes.

* Removed unused tmLanguage file

* Added logic to download extension from toolservice

* Fixed launchArgs. Removed commented code from extension.ts. Changed config.json to use net5.0

* Added displayName to package.nls.json. Removed hide flag from databaseName. Other code review feedback.

* Added readme info to AzureMonitor

* Removed unused client-error-handler and ui-references files. Combined outputChannel in azuremonitorServer. Removed TODO from contextProvider. Renamed function in extension.ts. Removed unneeded 'use strict' from cancelableStream.ts. Removed second outputChannel from objectExplorerNodeProvider.

* Removed unused files
2021-07-06 15:27:19 -07:00
Lucy Zhang
9ac1f16cea Fix notebook toolbar when ADS screen width is decreased (#15987)
* fix toolbar when width is decreased

* remove height css
2021-07-06 11:45:46 -07:00
brian-harris
19e25f04b1 SQL Migration extension - accessibility bug - fixes filter enter and focus (#15953)
* fix filter enter and focus

* debounce and fix search

* refactor null check and undo change/remove await

* using control types to improve and remove 'any'
2021-07-02 19:11:50 -07:00
Alex Ma
9f77c74b9f Update for English XLFs (#15986)
* updates for English XLF files

* added xml-language-features

* another update to sql.xlf

* changed locproject entry to match new format.
2021-07-02 14:07:27 -07:00
Alex Ma
f894dad38b fixed LocProject (#15985) 2021-07-02 13:34:09 -07:00
Alex Ma
ba3ab201c8 added import-extensions-xlf gulp task (#15966)
* added import-extensions-xlf gulp task

* combined export and import

* changed vscode-translations-export

* added rename vscode langpacks task

* added node command to import language xlfs

* fixed import language xlfs command

* added comments

* removed import script as we need to investigate localization
2021-07-02 10:58:14 -07:00
Alan Ren
24b77a6a40 add focus outline style for hyperlink (#15977) 2021-07-02 10:34:26 -07:00
Charles Gagnon
e8ba89750b Update sqlite (#15973) 2021-07-01 15:44:54 -07:00
Aasim Khan
225788a121 Adding option to create a resource group in sql migration dms page. (#15856)
* Pushing all changes

* Fixing more stuff

* Added new to the newly created resource group

* removing error message when user starts typing

* removing unused import

* Localizing some string
adding aria labels and loading text

* Adding focus to resource group input box

* switching to name instead of display name as it contains (new) param

* changing focus to resource group dropdown after creation of a new resource group

* fixing typo
2021-07-01 13:48:10 -07:00
Charles Gagnon
b1a9d6baa1 Fix resource deployment validations (#15963)
* Fix resource deployment validations

* comment out unused for now

* Fix tests
2021-07-01 11:49:14 -07:00
Charles Gagnon
5bc5816a25 Bump electron@12.0.7 2021-07-01 11:42:35 -07:00
Alex Ma
30462dfb25 Webpack Extensions update (#15919)
* test for schemacompare

* added schema compare to package.json

* added arc for translation

* added admin-tool-ext-win

* added agent

* added cms to list of extensions webpacked

* removed dacpac

* added import

* added machine-learning

* removed profiler

* added server-report

* added sql-assessment

* added sql-migration

* added sql-database-projects

* added query-history

* fixed and updated dependencies in extensions for webpacking.

* fixed names of entry

* fixed webpack for machine-learning

* removed dependency external for dacpac

* reduced externals for server-report

* added back import

* removed extension.webpack.config.js

* removed useless handlebars function

* added todo message
2021-07-01 09:56:55 -07:00
Aasim Khan
ca07671a68 - Adding mouse in and out styling to clickable divs (#15956)
* - Adding mouse in and out styling to clickable divs
- Moving dashboard card styling to outer div container from inner form container

* removing detect changes

* Localizing aria label

* moving css from template to styles

* adding themeing participant to change hover styling

* removing unused event handlers

* separating styling
2021-07-01 09:49:08 -07:00
Charles Gagnon
91d51a43cd Remove space from distro (#15970) 2021-07-01 09:39:45 -07:00
Leila Lali
38a6fc4d60 Updating ML extension version to 0.11.0 (#15959) 2021-07-01 09:33:43 -07:00
Charles Gagnon
41bac47cbd Add open existing project functionality for VS Code (#15958) 2021-07-01 09:20:46 -07:00
Barbara Valdez
a6644333c0 Pin and unpin notebook smoke test (#15936)
* add pin/unpin smoketest

* fix simple search query assert
2021-06-30 22:14:18 -07:00
Vasu Bhog
7cf7ca5d15 use helper function for smoke test (#15964) 2021-06-30 21:33:54 -07:00
Barbara Valdez
0e10e3e3ca fix WYSIWYG toolbar (#15901)
* fix WYSIWYG toolbar

* add height

* remove new rule and specify css selectors
2021-06-30 17:28:11 -07:00
Karl Burtram
667cf760f0 Bump handlebars in samples (#15960) 2021-06-30 15:04:19 -07:00
Sakshi Sharma
e72ba26411 Bump Sql Db Projects extension version (#15688)
* Bump Sql Db Projects extension

* Bump azdata version for Sql Db projects

* Update package.json

Co-authored-by: Charles Gagnon <chgagnon@microsoft.com>
2021-06-30 11:03:42 -07:00
Charles Gagnon
7ce791d826 Load all data workspace projects directly from workspace (#15921)
* Load all projects directly from workspace

* fixes

* Remove relativity and fix tests

* fix compile

* PR comments

* remove unused

* distro
2021-06-30 10:58:34 -07:00
Charles Gagnon
66c1fdc457 Add Remote Book dialog smoke test (#15932)
* Add Remote Book smoke test

* fixes

* distro

* Try closing toasts

(cherry picked from commit fd53c91375f4ba73554c801be3378375b2521d31)
2021-06-30 10:51:56 -07:00
Aasim Khan
6e814d9ff0 Updating sqltoolservice (#15928) 2021-06-30 10:19:27 -07:00
Vasu Bhog
ec057ba8c1 [ADS Web] Fix Plotly notebook rendering issue (#15948)
* fix Plotly render issue
2021-06-30 09:56:09 -07:00
nasc17
3f4e7f8c36 Add and drop Postgres extensions (#15923)
* Works

* Clean up

* Aria labels added

* Pr fixes, only allow one drop at a time, check for citus

* Cleaning up refresh

* Created separate function for creating drop button

* Added with props, add comment about not able to drop citus extension

* Update url link of postgres extensions to match engine version after config is available
2021-06-30 06:37:48 -07:00
Charles Gagnon
8ddb6ca7e8 Fix query history view actions (#15949)
* Fix query history view actions

* Update order

* update order values

* Use codicon
2021-06-29 19:56:44 -07:00
Alan Ren
aa3b4f0806 fix css selector (#15945) 2021-06-29 16:55:54 -07:00
brian-harris
1538adf5ed add aria label content to dropdown controls (#15940) 2021-06-29 15:35:48 -07:00
Karl Burtram
31e9c21c06 Fix regressions in web build (#15920)
* Update build for web issues

* Fix remote\web package.json

* Remove extension activate test

* Add terminal entry point

* Bump distro

* Update docker files and pipelines

* Add linux task

* Update build pool

* Correct image name

* Update pipelines

* Fix indention

* Update sql-web-build.yml for Azure Pipelines

Add new build variable

* Update sql-web-build.yml for Azure Pipelines

Revert previous commit

* Disable sourcemaps upload

* Remove compile step

* Fix yaml typo

* Fix dependson error

* Update yaml

* Add notebook parameters

* Remove web from desktop builds

* Add web createDrop script

* Bump distro

* Change execution mode on createDrop script

* Code review feedback

* Update CI hygiene cache key

* Bump distro

* Fix merge conflicts

* Revert "Fix merge conflicts"

This reverts commit 06f7a58b6e0a065520b7686e8469e4e7682e157a.

* Bump distro to before smoke test update
2021-06-29 12:12:27 -07:00
Charles Gagnon
10ad6f7119 Hide dropdown loading container when not used (#15934)
* Hide dropdown loading container when not used

* Remove unused
2021-06-29 11:46:12 -07:00
Alex Ma
aee24b202d Langpack refresh gulp task (#15930)
* added work in progress langpack commands

* working langpack refresh function added

* removed external extensions change.

* reverted extensions.js

* changed wording slightly

* changed wording

* added changelog and readme message

* clarified wording again

* added handling for outdated strings

* fixed wording and changed promise

* added another comment

* added have
2021-06-29 11:32:57 -07:00
Lucy Zhang
7ccbe61a1a Remove old python installation after new Python is successfully downloaded (#15931)
* remove old python after new download

* remove catch
2021-06-29 08:55:41 -07:00
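A sketch of the ordering described above: delete the previous Python installation only after the new one has downloaded successfully, so a failed download never removes the working interpreter. The paths and download callback are assumed.

```typescript
import { promises as fs } from 'fs';

async function replacePythonInstall(
	downloadNew: () => Promise<string>,   // resolves to the new install path, throws on failure
	oldInstallPath: string
): Promise<string> {
	const newInstallPath = await downloadNew();  // old install is untouched if this throws
	await fs.rm(oldInstallPath, { recursive: true, force: true });
	return newInstallPath;
}
```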
Charles Gagnon
dac1bdafd7 Port VS Code 1.56.0 Hotfixes (#15912)
* fix #123040

* Switch priority of shell/shellArgs and defaultProfile setting

Fixes #123159

We were too aggressive introducing the new default profile system as many
users (as well as dev containers, microsoft/vscode-dev-containers#838) depend
on these settings for their workflow. This also includes a change in behavior
where if shellArgs are specified, they are applied to the fallback shell
(even when shell.platform isn't specified), which aligns with past behavior.

* fixes #123036 (#123263)

* fixes #123041 (#123287)

* 1.56 fix for #123044

* Do not invoke `resolveWithInteraction` over terminal settings (#123590)

* Change grunt, gulp and jake task auto detection to be off by default (#123588)

Co-authored-by: meganrogge <megrogge@gmail.com>
Co-authored-by: Daniel Imms <2193314+Tyriar@users.noreply.github.com>
Co-authored-by: João Moreno <joao.moreno@microsoft.com>
Co-authored-by: Martin Aeschlimann <martinae@microsoft.com>
Co-authored-by: Alex Dima <alexdima@microsoft.com>
2021-06-29 08:51:21 -07:00
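A simplified TypeScript sketch of the priority switch described in the terminal hotfix above: explicit shell/shellArgs settings win over the newer default-profile system, and shellArgs alone are applied to the fallback shell. This illustrates the described behavior only, not VS Code's actual resolver.

```typescript
interface TerminalSettings {
	shell?: string;                                      // legacy terminal.integrated.shell.<platform>
	shellArgs?: string[];                                // legacy terminal.integrated.shellArgs.<platform>
	defaultProfile?: { path: string; args?: string[] };
}

function resolveShell(s: TerminalSettings, fallbackShell: string): { path: string; args: string[] } {
	if (s.shell) {
		return { path: s.shell, args: s.shellArgs ?? [] };
	}
	if (s.shellArgs) {
		// Per the commit message: args without a shell are applied to the fallback shell.
		return { path: fallbackShell, args: s.shellArgs };
	}
	if (s.defaultProfile) {
		return { path: s.defaultProfile.path, args: s.defaultProfile.args ?? [] };
	}
	return { path: fallbackShell, args: [] };
}
```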
Lucy Zhang
5fa53c3519 clean up trust notebook smoke test (#15926) 2021-06-28 15:39:29 -07:00
Charles Gagnon
a9e8885fe6 Update Mac Icons (#15911)
* Update MacOS Icons

* distro2

* Update package.json
2021-06-28 15:21:18 -07:00
Lucy Zhang
189fe762a9 remove layout call in output component init (#15905) 2021-06-28 14:48:44 -07:00
Vasu Bhog
c3e0958f2d Add simple smoke test for collapse/expand cell (#15879)
* simple smoke test for collapse / expand cells
2021-06-25 18:08:35 -07:00
Kim Santiago
edc319a90c Remove awaits that were blocking data workspace extension from loading (#15904)
* remove awaits that were blocking data workspace extension from loading

* add comment

* remove await and use then

* use return instead of await

* add notification when there's an error
2021-06-25 15:46:28 -07:00
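A sketch of the activation pattern the entry above describes: start slow work without awaiting it so extension activation is not blocked, and surface failures as a notification. The project-loading helper is a placeholder, not the data workspace extension's real code.

```typescript
import * as vscode from 'vscode';

export function activate(_context: vscode.ExtensionContext): void {
	// Fire and forget: use then/catch instead of await so activate() returns immediately.
	loadWorkspaceProjects()
		.then(projects => console.log(`Loaded ${projects.length} projects`))
		.catch(err => vscode.window.showErrorMessage(`Failed to load projects: ${err}`));
}

async function loadWorkspaceProjects(): Promise<string[]> {
	// Placeholder for the real project-discovery work.
	return [];
}
```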
Lucy Zhang
ff1a642157 Add jupyter server shutdown timeout setting (#15913)
* add jupyter server shutdown timeout setting

* no timeout if setting is 0

* set minimum value to 0
2021-06-25 15:16:32 -07:00
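A hedged sketch of reading such a timeout setting, treating 0 as "never time out" as described above; the configuration section and key names are assumptions, not the real ADS setting IDs.

```typescript
import * as vscode from 'vscode';

// Returns undefined when the user sets 0, meaning the server shutdown should
// not be subject to any timeout.
function getShutdownTimeoutMs(): number | undefined {
	const seconds = vscode.workspace
		.getConfiguration('notebook')                  // assumed section
		.get<number>('jupyterShutdownTimeout', 30);    // assumed key and default
	return seconds === 0 ? undefined : Math.max(0, seconds) * 1000;
}
```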
Alex Ma
7c6368b4b6 Carbon Edit Strings moved to Sql Folder (#15884)
* added first strings

* added more localized files

* translated all strings in vscode

* added exported strings to editStrings

* updated files

* more changes

* moved to base
2021-06-25 15:08:29 -07:00
Udeesha Gautam
768bf47aec adding more logs for transient failure cases in schema compare (#15900) 2021-06-25 12:00:23 -07:00
Charles Gagnon
33ff661c6f Add VS Code native New Project create flow (#15906)
* Add VS Code native New Project create flow

* Update project name title

* Ignore focus out

* comments

* ellipsis
2021-06-25 10:46:40 -07:00
Charles Gagnon
1caef2dc6e Remove duplicate extensions from browse filter (#15907)
* Remove duplicate extensions from browse filter

* Update placeholder
2021-06-24 23:55:53 -07:00
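An illustrative de-duplication keyed on extension identifier, in the spirit of the entry above; not the actual browse-filter implementation.

```typescript
function dedupeById<T extends { id: string }>(items: T[]): T[] {
	const seen = new Map<string, T>();
	for (const item of items) {
		if (!seen.has(item.id)) {
			seen.set(item.id, item);   // keep the first occurrence of each id
		}
	}
	return [...seen.values()];
}
```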
Hai Cao
3eef3f2a0b Fix missing add azure account button in accountDialog (#15852)
* fix missing add azure account button in accountDialog

* add sql edit note

* fix other getActions overrides

* use registerAction2 for adding account button

* fix icon and cleanup

* fix big add account button

* lint
2021-06-24 17:21:11 -07:00
Lucy Zhang
0d2710341a increase jupyter server shutdown timeout (#15899) 2021-06-24 15:13:08 -07:00
Aditya Bist
a1652a5ecb fix icons getting cut off (#15898) 2021-06-24 12:44:12 -07:00
Kim Santiago
8c6be27254 fix projects notification showing when it shouldn't (#15890)
* fix projects notification showing when it shouldn't

* use path.resolve
2021-06-24 12:24:57 -07:00
Barbara Valdez
ae7e69381a Resize open book icon (#15828)
* resize icon

* rename icon and remove icon from commands
2021-06-24 09:50:50 -07:00
Charles Gagnon
0a3ed1a63a vBump arc and azdata (#15894) 2021-06-24 09:22:30 -07:00
nasc17
478a2bf64b View extensions of Postgres server group (#15887)
* Init

* Fix view

* Change strings

* Change strings edit/view

* Only view extensions

* Added loading text

* Add table label, remove try catch block, prfixes

* String add

* Take out podstatus model, add correct link
2021-06-24 09:20:22 -07:00
brian-harris
e639a94dda Dev/brih/task/assessment select first error (#15888)
* refactor assessment result display, auto-select 1st item

* add async support using setDataValues in declarativeTable

* address review feedback
2021-06-24 08:41:05 -07:00
Chris LaFreniere
f7a723d98d Notebooks A11Y: Add button role for Placeholder Links (#15851)
* Add button role

* Add aria-label

* localize aria label
2021-06-23 16:48:01 -07:00
Alan Ren
a180a6bc47 fix separator not visible issue (#15886) 2021-06-23 15:51:32 -07:00
Charles Gagnon
3a3d7f5271 withProperties -> withProps (#15876)
* withProperties -> withProps

* Fix errors

* remove ,

* fixes

* Update azdata-test

* Fix dacpac tests

* Add required and remove added layout
2021-06-23 14:26:14 -07:00
Alan Ren
e21f56d719 open query editor as default editor (#15874) 2021-06-22 23:53:55 -07:00
Charles Gagnon
4707c1601c Handle no azdata API in sql db proj extension gracefully (#15873)
* Update db proj ext to handle not having azdata API available

* Fixes

* Fix compile
2021-06-22 23:34:01 -07:00
Lucy Zhang
c636e24d03 Add simple notebook code cell smoke test (#15870)
* simple code cell test

* change test order

* fix select all text keybinding
2021-06-22 20:31:53 -07:00
Lucy Zhang
3634110974 fix python dialog selectors (#15866) 2021-06-22 17:03:44 -07:00
Cory Rivera
6c6a0506b2 Add Markdown Toolbar functionality to notebook smoke tests. (#15868) 2021-06-22 16:38:04 -07:00
Charles Gagnon
00361e52a2 Handle no azdata API in data-workspace extension gracefully (#15871) 2021-06-22 16:35:20 -07:00
Alan Ren
1e2cb1cdf9 increase the timeout for compile step (#15872)
give it 30 minutes more to see whether the compile step can finish within reasonable extra time.
2021-06-22 15:32:53 -07:00
Charles Gagnon
ec2f2b19fb Wait for SqlToolsService to start before running smoke tests (#15864) 2021-06-22 13:05:21 -07:00
Alex Ma
1dc4a437eb Export-XLF function for ADS (#15709)
* wip commit

* added function to generate XLFs for ADS

* code cleanup

* removed unnecessary locfunc

* updated extensions and locfunc

* changed wording to be more clear

* added working single extension compile

* added export all extensions

* added more comments and closing sql carbon edit braces.

* consolidated gulpfile.extensions changes

* changed name to remove ADS part.

* changed gulpfile name use
2021-06-22 09:57:15 -07:00
Charles Gagnon
49562239a0 Add create content folder test (#15849)
* Add create book from content folder test

* Remove hardcoded
2021-06-22 08:56:50 -07:00
Alan Ren
a35f8ffc52 fix extension view issues (#15850) 2021-06-21 18:07:48 -07:00
Charles Gagnon
999acca745 Fix create book dialog smoke test (#15848) 2021-06-21 16:28:05 -07:00
Maddy
90b90afeab remove images from metadata on source update (#15640)
* remove images from metadata on update

* update comment

* reset only on html update

* add comment

* fixes

Co-authored-by: chgagnon <chgagnon@microsoft.com>
2021-06-21 13:55:37 -07:00
Charles Gagnon
adbf40859f Fix smoke tests (#15847) 2021-06-21 12:53:09 -07:00
nasc17
3cc8d29821 Per role configuration customization when creating PG server group (#15779)
* Init

* Per role create calls in notebook

* Fixed info

* Change order and strings

* Fixed string

* Separate strings from coordinator and workers
2021-06-21 11:05:33 -07:00
nasc17
248e5aa40d change name (#15845) 2021-06-21 11:05:13 -07:00
Alan Ren
0e1a8ab8ea support menu column (#15754)
* support menu column

* comments
2021-06-21 10:36:55 -07:00
Charles Gagnon
22dcf7777c Fix view action icon sizes (#15834) 2021-06-21 09:59:53 -07:00
Charles Gagnon
207312bd96 Remove unused workflows (#15782) 2021-06-19 11:49:13 -07:00
Alan Ren
acfa5e1d1b fix insights widget issue (#15827) 2021-06-18 23:12:06 -07:00
Charles Gagnon
38edcae32c Update VS Code version to match latest merge sync point (#15830) 2021-06-18 22:57:25 -07:00
Alan Ren
03f31a865a use more specific css selectors (#15826) 2021-06-18 18:33:25 -07:00
Cory Rivera
d92c1d5ca8 Add max size setting for Rich Text undo history in notebooks. (#15793) 2021-06-18 16:04:45 -07:00
Hai Cao
2eab870ca2 remove "Edit Data" menu option for SqlDataWarehouse (#15816) 2021-06-18 13:34:59 -07:00
Vasu Bhog
b8dbd3f0dc do not prompt for notebook uri file (#15809) 2021-06-18 13:07:08 -07:00
Chris LaFreniere
06e7d4d489 Let parent control width for text cell preview (#15800) 2021-06-18 12:51:58 -07:00
brian-harris
d08182416b add feedback button, open report issue dialog (#15762) 2021-06-18 12:22:41 -07:00
Alan Ren
aeda95bb70 fix race condition in declarative table (#15801) 2021-06-18 11:40:19 -07:00
chayaverma7
4c2f6eafe0 Update package.json (#15817)
Update ADS Version after Release
2021-06-18 23:26:50 +05:30
Aasim Khan
488ccea731 Improvements in blob storage support for SQL Migration. (#15693)
* changing the cutover icon on migration cutover page.

* Fixing monitoring table and pending log backups

* converting file upload times in utc to local time zones

* adding autorefresh to dashboard, migration status and cutover dialogs.

* Supporting blob container e2e

* vbump extension

* Fixing some PR comments

* Fixed broken blob container dropdown onChange event

* Localizing display string in refresh dialog
Fixing some localized strings

* Fixing var declaration

* making a class readonly for 250px width

* removing refresh interval dialog and replacing it with hardcoded values.

* Fixing summary page IR information.

* surfacing test connection error

* Clearing intervals on view closed to remove auto refresh.
2021-06-17 22:19:42 -07:00
Charles Gagnon
35832e83da Add required field to kube config picker (#15795) 2021-06-17 19:38:38 -07:00
Charles Gagnon
efd752aafe Add aria labels to buttons and spin inputs on Arc dashboards (#15798) 2021-06-17 19:38:27 -07:00
dependabot[bot]
df296939a8 Bump normalize-url from 4.5.0 to 4.5.1 in /build (#15777)
Bumps [normalize-url](https://github.com/sindresorhus/normalize-url) from 4.5.0 to 4.5.1.
- [Release notes](https://github.com/sindresorhus/normalize-url/releases)
- [Commits](https://github.com/sindresorhus/normalize-url/commits)

---
updated-dependencies:
- dependency-name: normalize-url
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-17 14:58:31 -07:00
Charles Gagnon
d96835c083 Fix secondary actions for Notebook editor toolbar (#15788) 2021-06-17 14:52:36 -07:00
Charles Gagnon
331f8115dc Fix input validation alert (#15749)
* Fix input validation alert

* Add override
2021-06-17 13:47:53 -07:00
Charles Gagnon
d871efc079 Fix SQL taskbar icons (#15786) 2021-06-17 12:47:44 -07:00
Alex Ma
2ec6a689d3 added updates to extensions for sqltoolsservice version (#15745) 2021-06-17 11:12:49 -07:00
Charles Gagnon
cae598d36c Correctly announce loading state for arc cluster context loading (#15747)
* Correctly announce loading state for arc cluster context loading

* localize

* Fix compile

* Fix test
2021-06-17 10:28:12 -07:00
chayaverma7
7c3a7e2646 Update CHANGELOG.md (#15778)
* Update CHANGELOG.md

* Update README.md
2021-06-17 21:45:29 +05:30
Charles Gagnon
3cb2f552a6 Merge from vscode a348d103d1256a06a2c9b3f9b406298a9fef6898 (#15681)
* Merge from vscode a348d103d1256a06a2c9b3f9b406298a9fef6898

* Fixes and cleanup

* Distro

* Fix hygiene yarn

* delete no yarn lock changes file

* Fix hygiene

* Fix layer check

* Fix CI

* Skip lib checks

* Remove tests deleted in vs code

* Fix tests

* Distro

* Fix tests and add removed extension point

* Skip failing notebook tests for now

* Disable broken tests and cleanup build folder

* Update yarn.lock and fix smoke tests

* Bump sqlite

* fix contributed actions and file spacing

* Fix user data path

* Update yarn.locks

Co-authored-by: ADS Merger <karlb@microsoft.com>
2021-06-17 08:17:11 -07:00
Christopher Suh
fdcb97c7f7 bump sts version (#15744) 2021-06-16 15:43:11 -07:00
Charles Gagnon
1231be124a Add aria-label to kube config input box (#15743) 2021-06-16 12:22:00 -07:00
Charles Gagnon
be1ff8e37b Add smoke test for Create Book dialog (#15714)
* Add smoke test for Create Book dialog

* Add comment

* Update distro

* update distro 2

* distro
2021-06-16 10:54:04 -07:00
Lucy Zhang
4c039f7a88 Re-enable notebook text cell smoke test (#15720)
* re-enable text cell test

* close notebook after text cell test
2021-06-16 06:20:21 -07:00
Alan Ren
3315214435 fix more issues related to query cancel (#15727)
* fix more issues related to query cancel

* comments
2021-06-15 21:10:41 -07:00
Aditya Bist
e4e703151f Update chokidar (#15726)
* update chokidar

* add yarn lock files
2021-06-15 17:29:00 -07:00
Alan Ren
5bd1552a59 add null check for errors in the console while using query editor (#15711)
* add null check

* another null check

* pr comments

* update typing
2021-06-15 16:05:38 -07:00
Alan Ren
a8f3d97f59 allow extension to select row in declarative table (#15703)
* allow extension to select row in declarative table

* pr comments

* use proper type for the event.

* update event arg
2021-06-15 15:41:37 -07:00
Lucy Zhang
24cd2106a1 make python installation more stable (#15719) 2021-06-15 14:08:55 -07:00
Charles Gagnon
6c23007a79 Clarify filtering instructions (#15724) 2021-06-15 12:42:37 -07:00
Charles Gagnon
ebe24046de Log extension host commands (#15721)
* Log extension host commands

* Add more comments
2021-06-15 12:10:09 -07:00
Charles Gagnon
c9850fe59f Remove en-us from links (#15722) 2021-06-15 11:51:35 -07:00
Charles Gagnon
db3a0ef7ac Trim pathname for link callout file paths (#15716)
* Don't use pathname for link URLs

* Trim pathname instead
2021-06-15 09:52:27 -07:00
Charles Gagnon
6a7e1dfd27 Fix create book dialog clearing input boxes on browse cancel (#15713) 2021-06-14 17:47:30 -07:00
nasc17
902b5dc1fa Changed default 2 (#15701) 2021-06-14 10:40:48 -07:00
Charles Gagnon
b8509d8b7c Fix dacpac utils function (#15702) 2021-06-14 08:38:29 -07:00
Alex Ma
b413de153c Localization option added to Kusto and MSSQL (#15674)
* added environment language need to test

* added environment language, need to test if it works

* added locale change to code

* added Kusto localization for its ServiceLayer

* removed extra space

* removed console log and moved onto one line

* removed quotes
2021-06-11 12:35:18 -07:00
dependabot[bot]
ddc6b2fdd8 Bump sanitize-html from 1.20.0 to 2.3.2 (#15345)
* Bump sanitize-html from 1.20.0 to 2.3.2

Bumps [sanitize-html](https://github.com/apostrophecms/sanitize-html) from 1.20.0 to 2.3.2.
- [Release notes](https://github.com/apostrophecms/sanitize-html/releases)
- [Changelog](https://github.com/apostrophecms/sanitize-html/blob/main/CHANGELOG.md)
- [Commits](https://github.com/apostrophecms/sanitize-html/compare/1.20.0...2.3.2)

Signed-off-by: dependabot[bot] <support@github.com>

* update sanitize-html in remote

* fix conflict

* update sanitize-html in remote/web

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Lucy Zhang <luczhan@microsoft.com>
2021-06-10 13:54:07 -07:00
Alan Ren
eea8c61b6e add config for open after save behavior (#15675) 2021-06-10 13:32:05 -07:00
brian-harris
922d1287ac Add DMS service info dialog link to DMS name column in migration status (#15669)
* SQL Migration service status dialog

* implement auth key refresh

* fix method name typo
2021-06-10 10:53:08 -07:00
dependabot[bot]
87b8f1a1aa Bump eslint-utils from 1.3.1 to 1.4.3 (#15673)
Bumps [eslint-utils](https://github.com/mysticatea/eslint-utils) from 1.3.1 to 1.4.3.
- [Release notes](https://github.com/mysticatea/eslint-utils/releases)
- [Commits](https://github.com/mysticatea/eslint-utils/compare/v1.3.1...v1.4.3)

---
updated-dependencies:
- dependency-name: eslint-utils
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-10 10:45:32 -07:00
dependabot[bot]
05f23fdd5e Bump y18n from 3.2.1 to 3.2.2 (#15667)
Bumps [y18n](https://github.com/yargs/y18n) from 3.2.1 to 3.2.2.
- [Release notes](https://github.com/yargs/y18n/releases)
- [Changelog](https://github.com/yargs/y18n/blob/master/CHANGELOG.md)
- [Commits](https://github.com/yargs/y18n/commits)

---
updated-dependencies:
- dependency-name: y18n
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-10 09:21:07 -07:00
dependabot[bot]
69a990469d Bump ssri from 6.0.1 to 6.0.2 (#15666)
Bumps [ssri](https://github.com/npm/ssri) from 6.0.1 to 6.0.2.
- [Release notes](https://github.com/npm/ssri/releases)
- [Changelog](https://github.com/npm/ssri/blob/v6.0.2/CHANGELOG.md)
- [Commits](https://github.com/npm/ssri/compare/v6.0.1...v6.0.2)

---
updated-dependencies:
- dependency-name: ssri
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-10 09:19:19 -07:00
dependabot[bot]
6506c9ca1f Bump yargs-parser from 5.0.0 to 5.0.1 (#15668)
Bumps [yargs-parser](https://github.com/yargs/yargs-parser) from 5.0.0 to 5.0.1.
- [Release notes](https://github.com/yargs/yargs-parser/releases)
- [Changelog](https://github.com/yargs/yargs-parser/blob/v5.0.1/CHANGELOG.md)
- [Commits](https://github.com/yargs/yargs-parser/compare/v5.0.0...v5.0.1)

---
updated-dependencies:
- dependency-name: yargs-parser
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-10 09:17:56 -07:00
Alex Ma
5374129350 fix for portuguese (#15665) 2021-06-09 15:50:28 -07:00
Charles Gagnon
c8ef4ee47a Promote updated width parameter for createModelViewDialog to stable (#15648) 2021-06-09 12:28:12 -07:00
brian-harris
7256bff094 fix findDropDownItemIndex bug (#15642) 2021-06-09 12:11:19 -07:00
Leila Lali
95704dc45e ML extension version bump (#15407) 2021-06-09 11:56:13 -07:00
dependabot[bot]
75dd19bb82 Bump glob-parent from 5.1.1 to 5.1.2 in /extensions/notebook (#15645)
Bumps [glob-parent](https://github.com/gulpjs/glob-parent) from 5.1.1 to 5.1.2.
- [Release notes](https://github.com/gulpjs/glob-parent/releases)
- [Changelog](https://github.com/gulpjs/glob-parent/blob/main/CHANGELOG.md)
- [Commits](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2)

---
updated-dependencies:
- dependency-name: glob-parent
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-09 09:28:40 -07:00
dependabot[bot]
ecd4bd6864 Bump glob-parent from 5.1.0 to 5.1.2 in /remote (#15644)
Bumps [glob-parent](https://github.com/gulpjs/glob-parent) from 5.1.0 to 5.1.2.
- [Release notes](https://github.com/gulpjs/glob-parent/releases)
- [Changelog](https://github.com/gulpjs/glob-parent/blob/main/CHANGELOG.md)
- [Commits](https://github.com/gulpjs/glob-parent/compare/v5.1.0...v5.1.2)

---
updated-dependencies:
- dependency-name: glob-parent
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-09 09:28:27 -07:00
dependabot[bot]
a2d348ccc4 Bump glob-parent from 5.1.1 to 5.1.2 in /build/lib/watch (#15646)
Bumps [glob-parent](https://github.com/gulpjs/glob-parent) from 5.1.1 to 5.1.2.
- [Release notes](https://github.com/gulpjs/glob-parent/releases)
- [Changelog](https://github.com/gulpjs/glob-parent/blob/main/CHANGELOG.md)
- [Commits](https://github.com/gulpjs/glob-parent/compare/v5.1.1...v5.1.2)

---
updated-dependencies:
- dependency-name: glob-parent
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>

Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2021-06-09 09:27:22 -07:00
Alan Ren
d91660b66f fix the input box validation (#15634)
* fix the input box validation

* fix one more place
2021-06-08 14:24:50 -07:00
Cory Rivera
a61462a2c0 Improve Undo and Redo functionality for notebook Rich Text editors (#15627) 2021-06-08 13:24:58 -07:00
Lucy Zhang
d6d2e7dc08 filter out prose-accelerator (#15633) 2021-06-08 11:48:57 -07:00
brian-harris
d8b693341e Dev/brih/task/make dropdowns searchable (#15615)
* make dropdown controls editable, searchable

* update method name and return type

* update error message, and dropdown index selection logic

* address review feedback
2021-06-07 17:55:08 -07:00
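A hypothetical helper in the spirit of the searchable-dropdown work above (and the earlier findDropDownItemIndex fix): find the first option whose display text matches the typed filter, case-insensitively, returning -1 when nothing matches. Illustrative only.

```typescript
function findDropDownItemIndex(values: string[], filter: string): number {
	const needle = filter.trim().toLowerCase();
	if (!needle) {
		return -1;
	}
	return values.findIndex(v => v.toLowerCase().includes(needle));
}
```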
3391 changed files with 285907 additions and 201794 deletions

View File

@@ -32,17 +32,17 @@ Next: **[Try it out!](#try-it)**
## Quick start - GitHub Codespaces
> **IMPORTANT:** The current free user beta for GitHub Codespaces uses a "Basic" sized codespace which does not have enough RAM to run a full build of VS Code and will be considerably slower during codespace start and running VS Code. You'll soon be able to use a "Standard" sized codespace (4-core, 8GB) that will be better suited for this purpose (along with even larger sizes should you need it).
> **IMPORTANT:** You need to use a "Standard" sized codespace or larger (4-core, 8GB) since VS Code needs 6GB of RAM to compile. This is now the default for GitHub Codespaces, but do not downgrade to "Basic" unless you do not intend to compile.
1. From the [microsoft/vscode GitHub repository](https://github.com/microsoft/vscode), click on the **Code** dropdown, select **Open with Codespaces**, and the **New codespace**
> Note that you will not see these options if you are not in the beta yet.
2. After the codespace is up and running in your browser, press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd> and select **View: Show Remote Explorer**.
2. After the codespace is up and running in your browser, press <kbd>F1</kbd> and select **Ports: Focus on Ports View**.
3. You should see port `6080` under **Forwarded Ports**. Select the line and click on the globe icon to open it in a browser tab.
> If you do not see port `6080`, press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd>, select **Forward a Port** and enter port `6080`.
> If you do not see port `6080`, press <kbd>F1</kbd>, select **Forward a Port** and enter port `6080`.
4. In the new tab, you should see noVNC. Click **Connect** and enter `vscode` as the password.
@@ -58,7 +58,7 @@ You will likely see better performance when accessing the codespace you created
2. Set up [VS Code for use with GitHub Codespaces](https://docs.github.com/github/developing-online-with-codespaces/using-codespaces-in-visual-studio-code)
3. After the VS Code is up and running, press <kbd>Ctrl/Cmd</kbd> + <kbd>Shift</kbd> + <kbd>P</kbd>, choose **Codespaces: Connect to Codespace**, and select the codespace you created.
3. After the VS Code is up and running, press <kbd>F1</kbd>, choose **Codespaces: Connect to Codespace**, and select the codespace you created.
4. After you've connected to the codespace, use a [VNC Viewer](https://www.realvnc.com/en/connect/download/viewer/) to connect to `localhost:5901` and enter `vscode` as the password.

View File

@@ -8,7 +8,7 @@ set -e
SCRIPT_PATH="$(cd "$(dirname $0)" && pwd)"
CONTAINER_IMAGE_REPOSITORY="$1"
BRANCH="${2:-"master"}"
BRANCH="${2:-"main"}"
if [ "${CONTAINER_IMAGE_REPOSITORY}" = "" ]; then
echo "Container repository not specified!"

View File

@@ -2,7 +2,7 @@
"name": "Code - OSS",
// Image contents: https://github.com/microsoft/vscode-dev-containers/blob/master/repository-containers/images/github.com/microsoft/vscode/.devcontainer/base.Dockerfile
"image": "mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:branch-master",
"image": "mcr.microsoft.com/vscode/devcontainers/repos/microsoft/vscode:branch-main",
"workspaceMount": "source=${localWorkspaceFolder},target=/home/node/workspace/vscode,type=bind,consistency=cached",
"workspaceFolder": "/home/node/workspace/vscode",

View File

@@ -13,5 +13,6 @@
**/extensions/**/out/**
**/extensions/**/build/**
**/extensions/markdown-language-features/media/**
**/extensions/markdown-language-features/notebook-out/**
**/extensions/typescript-basics/test/colorize-fixtures/**
**/extensions/**/dist/**

File diff suppressed because it is too large

View File

@@ -8,12 +8,18 @@ assignees: ''
---
<!-- ⚠️⚠️ Do Not Delete This! bug_report_template ⚠️⚠️ -->
<!-- Please read our Rules of Conduct: https://opensource.microsoft.com/codeofconduct/ -->
<!-- Please search existing issues to avoid creating duplicates. -->
<!-- Also please test using the latest insiders build to make sure your issue has not already been fixed. -->
<!-- Use Help > Report Issue to prefill these. -->
<!-- 🔎 Search existing issues to avoid creating duplicates. -->
<!-- 🧪 Test using the latest Insiders build to see if your issue has already been fixed: https://github.com/Microsoft/azuredatastudio#try-out-the-latest-insiders-build-from-main -->
<!-- 💡 Instead of creating your report here, use 'Report Issue' from the 'Help' menu in Azure Data Studio to pre-fill useful information. -->
- Azure Data Studio Version:
- OS Version:
Steps to Reproduce:
1.
2.
<!-- 🔧 Launch with `azuredatastudio --disable-extensions` to check. -->
Does this issue occur when all extensions are disabled?: Yes/No
<!-- 📣 Issues caused by an extension need to be reported directly to the extension publisher. The 'Help > Report Issue' dialog can assist with this. -->

View File

@@ -1,35 +0,0 @@
name: "Build Chat"
on:
workflow_run:
workflows:
- CI
types:
- completed
branches:
- master
- release/*
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that arent needed for the rest of the actions
run: npm install @azure/storage-blob@12.1.1
- name: Build Chat
uses: ./actions/build-chat
with:
token: ${{ secrets.GITHUB_TOKEN }}
slack_token: ${{ secrets.SLACK_TOKEN }}
storage_connection_string: ${{ secrets.BUILD_CHAT_STORAGE_CONNECTION_STRING }}
workflow_run_url: ${{ github.event.workflow_run.url }}
notification_channel: build
log_channel: bot-log

View File

@@ -11,161 +11,296 @@ on:
- release/*
jobs:
windows:
name: Windows
runs-on: windows-latest
timeout-minutes: 30
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2.2.0
- uses: actions/setup-node@v2
with:
node-version: 12
- uses: actions/setup-python@v2
with:
python-version: "2.x"
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
# - name: Cache node_modules archive
# id: cacheNodeModules
# uses: actions/cache@v2
# with:
# path: ".build/node_modules_cache"
# key: "${{ runner.os }}-cacheNodeModulesArchive-${{ steps.nodeModulesCacheKey.outputs.value }}"
# - name: Extract node_modules archive
# if: ${{ steps.cacheNodeModules.outputs.cache-hit == 'true' }}
# run: 7z.exe x .build/node_modules_cache/cache.7z -aos
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: echo "::set-output name=dir::$(yarn cache dir)"
# - name: Cache yarn directory
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# uses: actions/cache@v2
# with:
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skipping caching for now
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
# - name: Create node_modules archive {{SQL CARBON EDIT}} Skip caching for now
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: |
# mkdir -Force .build
# node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
# mkdir -Force .build/node_modules_cache
# 7z.exe a .build/node_modules_cache/cache.7z -mx3 `@.build/node_modules_list.txt
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" # {{SQL CARBON EDIT}} Remove unused options playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
run: .\scripts\test.bat
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} disable for now
# run: yarn test-browser --browser chromium
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} disable for now
# run: .\scripts\test-integration.bat
linux:
name: Linux
runs-on: ubuntu-latest
timeout-minutes: 30
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2.2.0
# TODO: rename azure-pipelines/linux/xvfb.init to github-actions
- run: |
- name: Setup Build Environment
run: |
sudo apt-get update
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1 libkrb5-dev # {{SQL CARBON EDIT}} add kerberos dep
sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
sudo chmod +x /etc/init.d/xvfb
sudo update-rc.d xvfb defaults
sudo service xvfb start
name: Setup Build Environment
- uses: actions/setup-node@v1
- uses: actions/setup-node@v2
with:
node-version: 12
# TODO: cache node modules
# Increase timeout to get around latency issues when fetching certain packages
- run: |
yarn config set network-timeout 300000
yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene
name: Run Hygiene Checks
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
name: Run Strict Compile Options
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn valid-layers-check
name: Run Valid Layers Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: DISPLAY=:10 ./scripts/test.sh --tfs "Unit Tests" --coverage --runGlob "**/sql/**/*.test.js"
name: Run Unit Tests (Electron)
- run: DISPLAY=:10 ./scripts/test-extensions-unit.sh
name: Run Extension Unit Tests (Electron)
# {{SQL CARBON EDIT}} Add coveralls. We merge first to get around issue where parallel builds weren't being combined correctly
- run: node test/combineCoverage
name: Combine code coverage files
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
# - name: Cache node modules
# id: cacheNodeModules
# uses: actions/cache@v2
# with:
# path: "**/node_modules"
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: echo "::set-output name=dir::$(yarn cache dir)"
# - name: Cache yarn directory
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# uses: actions/cache@v2
# with:
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }} {{SQL CARBON EDIT}} Skip caching for now
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
id: electron-unit-tests
run: DISPLAY=:10 ./scripts/test.sh --coverage --runGlob "**/sql/**/*.test.js" # {{SQL CARBON EDIT}} Run only our tests with coverage
- name: Run Extension Unit Tests (Electron)
id: electron-extension-unit-tests
run: DISPLAY=:10 ./scripts/test-extensions-unit.sh
# {{SQL CARBON EDIT}} Add coveralls. We merge first to get around issue where parallel builds weren't being combined correctly
- name: Combine code coverage files
run: node test/combineCoverage
- name: Upload Code Coverage
uses: coverallsapp/github-action@v1.1.1
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: "test/coverage/lcov.info"
# Fails with cryptic error (e.g. https://github.com/microsoft/vscode/pull/90292/checks?check_run_id=433681926#step:13:9)
# - run: DISPLAY=:10 yarn test-browser --browser chromium
# name: Run Unit Tests (Browser)
# - run: DISPLAY=:10 ./scripts/test-integration.sh --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests (Electron)
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} Skip for now
# id: browser-unit-tests
# run: DISPLAY=:10 yarn test-browser --browser chromium
windows:
runs-on: windows-2016
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2.2.0
- uses: actions/setup-node@v1
with:
node-version: 12
- uses: actions/setup-python@v1
with:
python-version: "2.x"
# Increase timeout to get around latency issues when fetching certain packages
- run: |
yarn config set network-timeout 300000
yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron
name: Download Electron
- run: yarn gulp hygiene
name: Run Hygiene Checks
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
name: Run Strict Compile Options
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn valid-layers-check
name: Run Valid Layers Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: .\scripts\test.bat --tfs "Unit Tests"
name: Run Unit Tests (Electron)
# - run: yarn test-browser --browser chromium {{SQL CARBON EDIT}} disable for now @TODO @anthonydresser
# name: Run Unit Tests (Browser)
# - run: .\scripts\test-integration.bat --tfs "Integration Tests" {{SQL CARBON EDIT}} remove step
# name: Run Integration Tests (Electron)
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} Skip for now
# id: electron-integration-tests
# run: DISPLAY=:10 ./scripts/test-integration.sh
darwin:
name: macOS
runs-on: macos-latest
timeout-minutes: 30
env:
CHILD_CONCURRENCY: "1"
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2.2.0
- uses: actions/setup-node@v1
- uses: actions/setup-node@v2
with:
node-version: 12
# Increase timeout to get around latency issues when fetching certain packages
- run: |
yarn config set network-timeout 300000
yarn --frozen-lockfile
name: Install Dependencies
- run: yarn electron x64
name: Download Electron
- run: yarn gulp hygiene
name: Run Hygiene Checks
- run: yarn strict-vscode # {{SQL CARBON EDIT}} add step
name: Run Strict Compile Options
# - run: yarn monaco-compile-check {{SQL CARBON EDIT}} remove step
# name: Run Monaco Editor Checks
- run: yarn valid-layers-check
name: Run Valid Layers Checks
- run: yarn compile
name: Compile Sources
# - run: yarn download-builtin-extensions {{SQL CARBON EDIT}} remove step
# name: Download Built-in Extensions
- run: ./scripts/test.sh --tfs "Unit Tests"
name: Run Unit Tests (Electron)
# - run: yarn test-browser --browser chromium --browser webkit
# name: Run Unit Tests (Browser)
# - run: ./scripts/test-integration.sh --tfs "Integration Tests"
# name: Run Integration Tests (Electron)
# monaco:
# runs-on: ubuntu-latest
# env:
# CHILD_CONCURRENCY: "1"
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# steps:
# - uses: actions/checkout@v2.2.0
# # TODO: rename azure-pipelines/linux/xvfb.init to github-actions
# - run: |
# sudo apt-get update
# sudo apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 libgbm1
# sudo cp build/azure-pipelines/linux/xvfb.init /etc/init.d/xvfb
# sudo chmod +x /etc/init.d/xvfb
# sudo update-rc.d xvfb defaults
# sudo service xvfb start
# name: Setup Build Environment
# - uses: actions/setup-node@v1
# with:
# node-version: 10
# - run: yarn --frozen-lockfile
# name: Install Dependencies
# - run: yarn monaco-compile-check
# name: Run Monaco Editor Checks
# - run: yarn gulp editor-esm-bundle
# name: Editor Distro & ESM Bundle
# {{SQL CARBON EDIT}} Skip caching for now
# - name: Compute node modules cache key
# id: nodeModulesCacheKey
# run: echo "::set-output name=value::$(node build/azure-pipelines/common/computeNodeModulesCacheKey.js)"
# - name: Cache node modules
# id: cacheNodeModules
# uses: actions/cache@v2
# with:
# path: "**/node_modules"
# key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-cacheNodeModules13-
# - name: Get yarn cache directory path
# id: yarnCacheDirPath
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# run: echo "::set-output name=dir::$(yarn cache dir)"
# - name: Cache yarn directory
# if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
# uses: actions/cache@v2
# with:
# path: ${{ steps.yarnCacheDirPath.outputs.dir }}
# key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
# restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Execute yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
- name: Compile and Download
run: yarn npm-run-all --max_old_space_size=4095 -lp compile "electron x64" playwright-install download-builtin-extensions
- name: Run Unit Tests (Electron)
run: DISPLAY=:10 ./scripts/test.sh
# - name: Run Unit Tests (Browser) {{SQL CARBON EDIT}} Skip for now
# run: DISPLAY=:10 yarn test-browser --browser chromium
# - name: Run Integration Tests (Electron) {{SQL CARBON EDIT}} Skip for now
# run: DISPLAY=:10 ./scripts/test-integration.sh
hygiene:
name: Hygiene and Layering
runs-on: ubuntu-latest
timeout-minutes: 30
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
steps:
- uses: actions/checkout@v2
- uses: actions/setup-node@v2
with:
node-version: 12
- name: Compute node modules cache key
id: nodeModulesCacheKey
run: echo "::set-output name=value::$(node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js)"
- name: Cache node modules
id: cacheNodeModules
uses: actions/cache@v2
with:
path: "**/node_modules"
key: ${{ runner.os }}-cacheNodeModules13-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-cacheNodeModules13-
- name: Get yarn cache directory path
id: yarnCacheDirPath
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
run: echo "::set-output name=dir::$(yarn cache dir)"
- name: Cache yarn directory
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
uses: actions/cache@v2
with:
path: ${{ steps.yarnCacheDirPath.outputs.dir }}
key: ${{ runner.os }}-yarnCacheDir-${{ steps.nodeModulesCacheKey.outputs.value }}
restore-keys: ${{ runner.os }}-yarnCacheDir-
- name: Setup Build Environment # {{SQL CARBON EDIT}} Add step to install required packages if we need to run yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
run: |
sudo apt-get update
sudo apt-get install -y libkrb5-dev
- name: Execute yarn
if: ${{ steps.cacheNodeModules.outputs.cache-hit != 'true' }}
env:
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
run: yarn --frozen-lockfile --network-timeout 180000
- name: Run Hygiene Checks
run: yarn gulp hygiene
- name: Run Valid Layers Checks
run: yarn valid-layers-check
- name: Run Strict Compile Options # {{SQL CARBON EDIT}} add step
run: yarn strict-vscode
# - name: Run Monaco Editor Checks {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn monaco-compile-check
- name: Run Trusted Types Checks
run: yarn tsec-compile-check
# - name: Editor Distro & ESM Bundle {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn gulp editor-esm-bundle
# - name: Typings validation prep {{SQL CARBON EDIT}} Remove Monaco checks
# run: |
# mkdir typings-test
# - name: Typings validation {{SQL CARBON EDIT}} Remove Monaco checks
# working-directory: ./typings-test
# run: |
# yarn init -yp
# ../node_modules/.bin/tsc --init
# echo "import '../out-monaco-editor-core';" > a.ts
# ../node_modules/.bin/tsc --noEmit
# - name: Webpack Editor {{SQL CARBON EDIT}} Remove Monaco checks
# working-directory: ./test/monaco
# run: yarn run bundle
# - name: Compile Editor Tests {{SQL CARBON EDIT}} Remove Monaco checks
# working-directory: ./test/monaco
# run: yarn run compile
# - name: Download Playwright {{SQL CARBON EDIT}} Remove Monaco checks
# run: yarn playwright-install
# - name: Run Editor Tests {{SQL CARBON EDIT}} Remove Monaco checks
# timeout-minutes: 5
# working-directory: ./test/monaco
# run: yarn test
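The hygiene job above restores node_modules and the yarn cache from a key emitted by build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js, and only runs yarn when that key misses. The script itself is not shown in this diff; the TypeScript sketch below is a minimal illustration of the general idea, assuming the key is a hash over the repository's lockfiles (the file list and hashing details are assumptions, not the script's actual contents).

// sketch-compute-cache-key.ts: illustrative only; the real script may hash different inputs.
import { createHash } from 'crypto';
import { readFileSync } from 'fs';

// Assumed set of lockfiles that determine the dependency install.
const lockfiles = ['yarn.lock', 'build/yarn.lock', 'extensions/yarn.lock'];

const hash = createHash('sha256');
for (const file of lockfiles) {
  // A change in any lockfile produces a new key and therefore a fresh yarn install.
  hash.update(readFileSync(file));
}

// The workflow reads this value through the ::set-output syntax shown above and
// embeds it in the actions/cache key (runner.os + cacheNodeModules13 + hash).
console.log(hash.digest('hex'));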

View File

@@ -1,44 +0,0 @@
name: "Code Scanning - Action"
on:
push:
schedule:
- cron: "0 0 * * 0"
jobs:
CodeQL-Build:
strategy:
fail-fast: false
# CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v1
# Override language selection by uncommenting this and choosing your languages
# with:
# languages: go, javascript, csharp, python, cpp, java
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below).
- name: Autobuild
uses: github/codeql-action/autobuild@v1
# Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
# and modify them (or add more) to build your code if your project
# uses a compiled language
#- run: |
# make bootstrap
# make release
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v1

View File

@@ -1,50 +0,0 @@
name: "Deep Classifier: Runner"
on:
schedule:
- cron: 0 * * * *
repository_dispatch:
types: [trigger-deep-classifier-runner]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
ref: v42
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that aren't needed for the rest of the actions
run: npm install @azure/storage-blob@12.1.1
- name: "Run Classifier: Scraper"
uses: ./actions/classifier-deep/apply/fetch-sources
with:
# slightly overlapping to protect against issues slipping through the cracks if a run is delayed
from: 80
until: 5
configPath: classifier
blobContainerName: vscode-issue-classifier
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install --upgrade numpy scipy scikit-learn joblib nltk simpletransformers torch torchvision
- name: "Run Classifier: Generator"
run: python ./actions/classifier-deep/apply/generate-labels/main.py
- name: "Run Classifier: Labeler"
uses: ./actions/classifier-deep/apply/apply-labels
with:
configPath: classifier
allowLabels: "needs more info|new release"
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}
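The fetch-sources step above scrapes a sliding window of recently updated issues: with the hourly cron, from: 80 and until: 5 make consecutive runs overlap, which is what the "slightly overlapping" comment refers to. Below is a small TypeScript sketch of that window, assuming the two values are minutes before the current time (the action's exact parameter semantics are not shown in this diff).

// Illustrative only: compute the scrape window, assuming minutes before "now".
function scrapeWindow(fromMinutes: number, untilMinutes: number, now: Date = new Date()) {
  const msPerMinute = 60_000;
  return {
    // With from=80 and until=5 on an hourly schedule, adjacent runs overlap,
    // so issues are not missed when a run starts late.
    start: new Date(now.getTime() - fromMinutes * msPerMinute),
    end: new Date(now.getTime() - untilMinutes * msPerMinute),
  };
}

console.log(scrapeWindow(80, 5));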

View File

@@ -1,27 +0,0 @@
name: "Deep Classifier: Scraper"
on:
repository_dispatch:
types: [trigger-deep-classifier-scraper]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
ref: v42
path: ./actions
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Additional Dependencies
# Pulls in a bunch of other packages that aren't needed for the rest of the actions
run: npm install @azure/storage-blob@12.1.1
- name: "Run Classifier: Scraper"
uses: ./actions/classifier-deep/train/fetch-issues
with:
blobContainerName: vscode-issue-classifier
blobStorageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
token: ${{secrets.ISSUE_SCRAPER_TOKEN}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}

View File

@@ -1,40 +0,0 @@
name: VS Code Repo Dev Container Cache Image Generation
on:
push:
# Currently doing this for master, but could be done for PRs as well
branches:
- "master"
# Only updates to these files result in changes to installed packages, so skip otherwise
paths:
- "**/package-lock.json"
- "**/yarn.lock"
jobs:
devcontainer:
name: Generate cache image
runs-on: ubuntu-latest
steps:
- name: Checkout
id: checkout
uses: actions/checkout@v2
- name: Azure CLI login
id: az_login
uses: azure/login@v1
with:
creds: ${{ secrets.AZ_ACR_CREDS }}
- name: Build and push
id: build_and_push
run: |
set -e
ACR_REGISTRY_NAME=$(echo ${{ secrets.CONTAINER_IMAGE_REGISTRY }} | grep -oP '(.+)(?=\.azurecr\.io)')
az acr login --name $ACR_REGISTRY_NAME
GIT_BRANCH=$(echo "${{ github.ref }}" | grep -oP 'refs/(heads|tags)/\K(.+)')
if [ "$GIT_BRANCH" == "" ]; then GIT_BRANCH=master; fi
.devcontainer/cache/build-cache-image.sh "${{ secrets.CONTAINER_IMAGE_REGISTRY }}/public/vscode/devcontainers/repos/microsoft/vscode" "${GIT_BRANCH}"
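The Build and push step derives two values with grep: the ACR registry name (the host without the .azurecr.io suffix) and the branch name from github.ref, defaulting to master when the ref is not a branch or tag. The same parsing is expressed below as a TypeScript sketch, for illustration only.

// Equivalent of the two grep -oP extractions in the "Build and push" step (illustrative).
function acrRegistryName(registryHost: string): string | undefined {
  // "contoso.azurecr.io" -> "contoso"
  const match = /^(.+)\.azurecr\.io$/.exec(registryHost);
  return match ? match[1] : undefined;
}

function branchFromRef(ref: string): string {
  // "refs/heads/master" -> "master"; fall back to "master" like the shell script.
  const match = /^refs\/(?:heads|tags)\/(.+)$/.exec(ref);
  return match ? match[1] : 'master';
}

console.log(acrRegistryName('contoso.azurecr.io'), branchFromRef('refs/heads/master'));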

View File

@@ -1,27 +0,0 @@
name: Latest Release Monitor
on:
schedule:
- cron: 0/5 * * * *
repository_dispatch:
types: [trigger-latest-release-monitor]
jobs:
main:
runs-on: ubuntu-latest
steps:
- name: Checkout Actions
uses: actions/checkout@v2
with:
repository: "microsoft/vscode-github-triage-actions"
path: ./actions
ref: v42
- name: Install Actions
run: npm install --production --prefix ./actions
- name: Install Storage Module
run: npm install @azure/storage-blob@12.1.1
- name: Run Latest Release Monitor
uses: ./actions/latest-release-monitor
with:
storageKey: ${{secrets.AZURE_BLOB_STORAGE_CONNECTION_STRING}}
appInsightsKey: ${{secrets.TRIAGE_ACTIONS_APP_INSIGHTS}}
token: ${{secrets.VSCODE_ISSUE_TRIAGE_BOT_PAT}}

.vscode/launch.json vendored (141 changed lines)
View File

@@ -66,10 +66,147 @@
}
},
{
"type": "chrome",
"type": "extensionHost",
"request": "launch",
"name": "VS Code Emmet Tests",
"runtimeExecutable": "${execPath}",
"args": [
"${workspaceFolder}/extensions/emmet/test-fixtures",
"--extensionDevelopmentPath=${workspaceFolder}/extensions/emmet",
"--extensionTestsPath=${workspaceFolder}/extensions/emmet/out/test"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"group": "5_tests",
"order": 6
}
},
{
"type": "extensionHost",
"request": "launch",
"name": "VS Code Git Tests",
"runtimeExecutable": "${execPath}",
"args": [
"/tmp/my4g9l",
"--extensionDevelopmentPath=${workspaceFolder}/extensions/git",
"--extensionTestsPath=${workspaceFolder}/extensions/git/out/test"
],
"outFiles": [
"${workspaceFolder}/extensions/git/out/**/*.js"
],
"presentation": {
"group": "5_tests",
"order": 6
}
},
{
"type": "extensionHost",
"request": "launch",
"name": "VS Code API Tests (single folder)",
"runtimeExecutable": "${execPath}",
"args": [
// "${workspaceFolder}", // Uncomment for running out of sources.
"${workspaceFolder}/extensions/vscode-api-tests/testWorkspace",
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-api-tests",
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-api-tests/out/singlefolder-tests",
"--disable-extensions"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"group": "5_tests",
"order": 3
}
},
{
"type": "extensionHost",
"request": "launch",
"name": "VS Code API Tests (workspace)",
"runtimeExecutable": "${execPath}",
"args": [
"${workspaceFolder}/extensions/vscode-api-tests/testworkspace.code-workspace",
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-api-tests",
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-api-tests/out/workspace-tests"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"group": "5_tests",
"order": 4
}
},
{
"type": "extensionHost",
"request": "launch",
"name": "VS Code Tokenizer Tests",
"runtimeExecutable": "${execPath}",
"args": [
"${workspaceFolder}/extensions/vscode-colorize-tests/test",
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-colorize-tests",
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-colorize-tests/out"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"group": "5_tests",
"order": 5
}
},
{
"type": "extensionHost",
"request": "launch",
"name": "VS Code Notebook Tests",
"runtimeExecutable": "${execPath}",
"args": [
"${workspaceFolder}/extensions/vscode-notebook-tests/test",
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-notebook-tests",
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-notebook-tests/out"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"group": "5_tests",
"order": 6
}
},
{
"type": "extensionHost",
"request": "launch",
"name": "VS Code Custom Editor Tests",
"runtimeExecutable": "${execPath}",
"args": [
"${workspaceFolder}/extensions/vscode-custom-editor-tests/test-workspace",
"--extensionDevelopmentPath=${workspaceFolder}/extensions/vscode-custom-editor-tests",
"--extensionTestsPath=${workspaceFolder}/extensions/vscode-custom-editor-tests/out/test"
],
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"presentation": {
"group": "5_tests",
"order": 6
}
},
{
"type": "pwa-chrome",
"request": "attach",
"name": "Attach to azuredatastudio",
"port": 9222
"browserAttachLocation": "workspace",
"port": 9222,
"trace": true,
"outFiles": [
"${workspaceFolder}/out/**/*.js"
],
"resolveSourceMapLocations": [
"${workspaceFolder}/out/**/*.js"
],
"perScriptSourcemaps": "yes"
},
{
"type": "pwa-chrome",

View File

@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"February 2021\"",
"value": "$repo=repo:microsoft/vscode\n$milestone=milestone:\"April 2021\"",
"editable": true
},
{

View File

@@ -2,127 +2,106 @@
{
"kind": 1,
"language": "markdown",
"value": "#### Macros",
"editable": true
"value": "#### Macros"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper\n\n$MILESTONE=milestone:\"January 2021\"",
"editable": true
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-emmet-helper repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\""
},
{
"kind": 1,
"language": "markdown",
"value": "# Preparation",
"editable": true
"value": "# Preparation"
},
{
"kind": 1,
"language": "markdown",
"value": "## Open Pull Requests on the Milestone",
"editable": true
"value": "## Open Pull Requests on the Milestone"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:pr is:open",
"editable": true
"value": "$REPOS $MILESTONE is:pr is:open"
},
{
"kind": 1,
"language": "markdown",
"value": "## Open Issues on the Milestone",
"editable": true
"value": "## Open Issues on the Milestone"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item",
"editable": true
"value": "$REPOS $MILESTONE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item"
},
{
"kind": 1,
"language": "markdown",
"value": "## Feature Requests Missing Labels",
"editable": true
"value": "## Feature Requests Missing Labels"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate",
"editable": true
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate"
},
{
"kind": 1,
"language": "markdown",
"value": "# Testing",
"editable": true
"value": "# Testing"
},
{
"kind": 1,
"language": "markdown",
"value": "## Test Plan Items",
"editable": true
"value": "## Test Plan Items"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:open label:testplan-item",
"editable": true
"value": "$REPOS $MILESTONE is:issue is:open label:testplan-item"
},
{
"kind": 1,
"language": "markdown",
"value": "## Verification Needed",
"editable": true
"value": "## Verification Needed"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed -label:verified",
"editable": true
"value": "$REPOS $MILESTONE is:issue is:closed label:feature-request label:verification-needed -label:verified"
},
{
"kind": 1,
"language": "markdown",
"value": "# Verification",
"editable": true
"value": "# Verification"
},
{
"kind": 1,
"language": "markdown",
"value": "## Verifiable Fixes",
"editable": true
"value": "## Verifiable Fixes"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified -label:unreleased",
"editable": true
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified -label:unreleased"
},
{
"kind": 1,
"language": "markdown",
"value": "## Unreleased Fixes",
"editable": true
"value": "## Unreleased Fixes"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified label:unreleased",
"editable": true
"value": "$REPOS $MILESTONE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -label:z-author-verified label:unreleased"
},
{
"kind": 1,
"language": "markdown",
"value": "# Candidates",
"editable": true
"value": "# Candidates"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:open label:candidate",
"editable": true
"value": "$REPOS $MILESTONE is:open label:candidate"
}
]

View File

@@ -3,24 +3,28 @@
"kind": 1,
"language": "markdown",
"value": "### Categorizing Issues\n\nEach issue must have a type label. Most type labels are grey, some are yellow. Bugs are grey with a touch of red.",
"editable": true
"editable": true,
"outputs": []
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:\"needs more info\" -label:bug -label:feature-request -label:under-discussion -label:debt -label:*question -label:upstream -label:electron -label:engineering -label:plan-item ",
"editable": true
"editable": true,
"outputs": []
},
{
"kind": 1,
"language": "markdown",
"value": "### Feature Areas\n\nEach issue should be assigned to a feature area",
"editable": true
"editable": true,
"outputs": []
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode is:open is:issue assignee:@me -label:L10N -label:VIM -label:api -label:api-finalization -label:api-proposal -label:authentication -label:breadcrumbs -label:callhierarchy -label:code-lens -label:color-palette -label:comments -label:config -label:context-keys -label:css-less-scss -label:custom-editors -label:debug -label:debug-console -label:dialogs -label:diff-editor -label:dropdown -label:editor -label:editor-RTL -label:editor-autoclosing -label:editor-autoindent -label:editor-bracket-matching -label:editor-clipboard -label:editor-code-actions -label:editor-color-picker -label:editor-columnselect -label:editor-commands -label:editor-comments -label:editor-contrib -label:editor-core -label:editor-drag-and-drop -label:editor-error-widget -label:editor-find -label:editor-folding -label:editor-highlight -label:editor-hover -label:editor-indent-detection -label:editor-indent-guides -label:editor-input -label:editor-input-IME -label:editor-insets -label:editor-minimap -label:editor-multicursor -label:editor-parameter-hints -label:editor-render-whitespace -label:editor-rendering -label:editor-scrollbar -label:editor-symbols -label:editor-synced-region -label:editor-textbuffer -label:editor-theming -label:editor-wordnav -label:editor-wrapping -label:emmet -label:error-list -label:explorer-custom -label:extension-host -label:extension-recommendations -label:extensions -label:extensions-development -label:file-decorations -label:file-encoding -label:file-explorer -label:file-glob -label:file-guess-encoding -label:file-io -label:file-watcher -label:font-rendering -label:formatting -label:git -label:github -label:gpu -label:grammar -label:grid-view -label:html -label:i18n -label:icon-brand -label:icons-product -label:install-update -label:integrated-terminal -label:integrated-terminal-conpty -label:integrated-terminal-links -label:integrated-terminal-rendering -label:integrated-terminal-winpty -label:intellisense-config -label:ipc -label:issue-bot -label:issue-reporter -label:javascript -label:json -label:keybindings -label:keybindings-editor -label:keyboard-layout -label:label-provider -label:languages-basic -label:languages-diagnostics -label:languages-guessing -label:layout -label:lcd-text-rendering -label:list -label:log -label:markdown -label:marketplace -label:menus -label:merge-conflict -label:notebook -label:outline -label:output -label:perf -label:perf-bloat -label:perf-startup -label:php -label:portable-mode -label:proxy -label:quick-pick -label:references-viewlet -label:release-notes -label:remote -label:remote-explorer -label:rename -label:sandbox -label:scm -label:screencast-mode -label:search -label:search-api -label:search-editor -label:search-replace -label:semantic-tokens -label:settings-editor -label:settings-sync -label:settings-sync-server -label:shared-process -label:simple-file-dialog -label:smart-select -label:snap -label:snippets -label:splitview -label:suggest -label:sync-error-handling -label:tasks -label:telemetry -label:themes -label:timeline -label:timeline-git -label:titlebar -label:tokenization -label:touch/pointer -label:trackpad/scroll -label:tree -label:typescript -label:undo-redo -label:uri -label:ux -label:variable-resolving -label:vscode-build -label:vscode-website -label:web -label:webview -label:workbench-actions -label:workbench-cli -label:workbench-diagnostics -label:workbench-dnd -label:workbench-editor-grid -label:workbench-editors -label:workbench-electron -label:workbench-feedback -label:workbench-history 
-label:workbench-hot-exit -label:workbench-hover -label:workbench-launch -label:workbench-link -label:workbench-multiroot -label:workbench-notifications -label:workbench-os-integration -label:workbench-rapid-render -label:workbench-run-as-admin -label:workbench-state -label:workbench-status -label:workbench-tabs -label:workbench-touchbar -label:workbench-views -label:workbench-welcome -label:workbench-window -label:workbench-zen -label:workspace-edit -label:workspace-symbols -label:zoom",
"editable": true
"editable": true,
"outputs": []
}
]

View File

@@ -18,9 +18,9 @@
"editable": true
},
{
"kind": 1,
"language": "markdown",
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/Microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
"kind": 2,
"language": "github-issues",
"value": "$inbox=repo:microsoft/vscode is:open no:assignee -label:feature-request -label:testplan-item -label:plan-item ",
"editable": true
},
{
@@ -32,7 +32,7 @@
{
"kind": 1,
"language": "markdown",
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/master/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
"value": "New issues or pull requests submitted by the community are initially triaged by an [automatic classification bot](https://github.com/microsoft/vscode-github-triage-actions/tree/master/classifier-deep). Issues that the bot does not correctly triage are then triaged by a team member. The team rotates the inbox tracker on a weekly basis.\n\nA [mirror](https://github.com/JacksonKearl/testissues/issues) of the VS Code issue stream is available with details about how the bot classifies issues, including feature-area classifications and confidence ratings. Per-category confidence thresholds and feature-area ownership data is maintained in [.github/classifier.json](https://github.com/microsoft/vscode/blob/main/.github/classifier.json). \n\n💡 The bot is being run through a GitHub action that runs every 30 minutes. Give the bot the opportunity to classify an issue before doing it manually.\n\n### Inbox Tracking\n\nThe inbox tracker is responsible for the [global inbox](https://github.com/microsoft/vscode/issues?utf8=%E2%9C%93&q=is%3Aopen+no%3Aassignee+-label%3Afeature-request+-label%3Atestplan-item+-label%3Aplan-item) containing all **open issues and pull requests** that\n- are neither **feature requests** nor **test plan items** nor **plan items** and\n- have **no owner assignment**.\n\nThe **inbox tracker** may perform any step described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) but its main responsibility is to route issues to the actual feature area owner.\n\nFeature area owners track the **feature area inbox** containing all **open issues and pull requests** that\n- are personally assigned to them and are not assigned to any milestone\n- are labeled with their feature area label and are not assigned to any milestone.\nThis secondary triage may involve any of the steps described in our [issue triaging documentation](https://github.com/microsoft/vscode/wiki/Issues-Triaging) and results in a fully triaged or closed issue.\n\nThe [github triage extension](https://github.com/microsoft/vscode-github-triage-extension) can be used to assist with triaging — it provides a \"Command Palette\"-style list of triaging actions like assignment, labeling, and triggers for various bot actions.",
"editable": true
},
{

View File

@@ -2,217 +2,181 @@
{
"kind": 1,
"language": "markdown",
"value": "#### Macros",
"editable": true
"value": "#### Macros"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server\n\n$MILESTONE=milestone:\"January 2021\"\n\n$MINE=assignee:@me",
"editable": true
"value": "$REPOS=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-js-debug repo:microsoft/vscode-remote-release repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-settings-sync-server repo:microsoft/vscode-remotehub\n\n$MILESTONE=milestone:\"April 2021\"\n\n$MINE=assignee:@me"
},
{
"kind": 1,
"language": "markdown",
"value": "# Preparation",
"editable": true
"value": "# Preparation"
},
{
"kind": 1,
"language": "markdown",
"value": "## Open Pull Requests on the Milestone",
"editable": true
"value": "## Open Pull Requests on the Milestone"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:pr is:open",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:pr is:open"
},
{
"kind": 1,
"language": "markdown",
"value": "## Open Issues on the Milestone",
"editable": true
"value": "## Open Issues on the Milestone"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:iteration-plan -label:endgame-plan -label:testplan-item"
},
{
"kind": 1,
"language": "markdown",
"value": "## Feature Requests Missing Labels",
"editable": true
"value": "## Feature Requests Missing Labels"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request -label:verification-needed -label:on-testplan -label:verified -label:*duplicate"
},
{
"kind": 1,
"language": "markdown",
"value": "## Test Plan Items",
"editable": true
"value": "## Test Plan Items"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE is:issue is:open author:@me label:testplan-item",
"editable": true
"value": "$REPOS $MILESTONE is:issue is:open author:@me label:testplan-item"
},
{
"kind": 1,
"language": "markdown",
"value": "## Verification Needed",
"editable": true
"value": "## Verification Needed"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue is:closed label:feature-request label:verification-needed"
},
{
"kind": 1,
"language": "markdown",
"value": "# Testing",
"editable": true
"value": "# Testing"
},
{
"kind": 1,
"language": "markdown",
"value": "## Test Plan Items",
"editable": true
"value": "## Test Plan Items"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:testplan-item",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:testplan-item"
},
{
"kind": 1,
"language": "markdown",
"value": "## Verification Needed",
"editable": true
"value": "## Verification Needed"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -assignee:@me -label:verified label:feature-request label:verification-needed",
"editable": true
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -assignee:@me -label:verified -label:z-author-verified label:feature-request label:verification-needed"
},
{
"kind": 1,
"language": "markdown",
"value": "# Fixing",
"editable": true
"value": "# Fixing"
},
{
"kind": 1,
"language": "markdown",
"value": "## Open Issues",
"editable": true
"value": "## Open Issues"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:endgame-plan",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue is:open -label:endgame-plan -label:testplan-item -label:iteration-plan"
},
{
"kind": 1,
"language": "markdown",
"value": "## Open Bugs",
"editable": true
"value": "## Open Bugs"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue is:open label:bug"
},
{
"kind": 1,
"language": "markdown",
"value": "# Verification",
"editable": true
"value": "# Verification"
},
{
"kind": 1,
"language": "markdown",
"value": "## My Issues (verification-steps-needed)",
"editable": true
"value": "## My Issues (verification-steps-needed)"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-steps-needed",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-steps-needed"
},
{
"kind": 1,
"language": "markdown",
"value": "## My Issues (verification-found)",
"editable": true
"value": "## My Issues (verification-found)"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-found",
"editable": true
"value": "$REPOS $MILESTONE $MINE is:issue label:bug label:verification-found"
},
{
"kind": 1,
"language": "markdown",
"value": "## Issues filed by me",
"editable": true
"value": "## Issues filed by me"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"editable": true
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed author:@me sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found"
},
{
"kind": 1,
"language": "markdown",
"value": "## Issues filed from outside team",
"editable": true
"value": "## Issues filed from outside team"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15",
"editable": true
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found -author:aeschli -author:alexdima -author:alexr00 -author:AmandaSilver -author:bamurtaugh -author:bpasero -author:btholt -author:chrisdias -author:chrmarti -author:Chuxel -author:connor4312 -author:dbaeumer -author:deepak1556 -author:devinvalenciano -author:digitarald -author:eamodio -author:egamma -author:fiveisprime -author:gregvanl -author:isidorn -author:ItalyPaleAle -author:JacksonKearl -author:joaomoreno -author:jrieken -author:kieferrm -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:ornellaalt -author:orta -author:rebornix -author:RMacfarlane -author:roblourens -author:rzhao271 -author:sana-ajani -author:sandy081 -author:sbatten -author:stevencl -author:Tyriar -author:weinand -author:TylerLeonhardt -author:lramos15"
},
{
"kind": 1,
"language": "markdown",
"value": "## Issues filed by others",
"editable": true
"value": "## Issues filed by others"
},
{
"kind": 2,
"language": "github-issues",
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found",
"editable": true
"value": "$REPOS $MILESTONE -$MINE is:issue is:closed -author:@me sort:updated-asc label:bug -label:verified -label:z-author-verified -label:on-testplan -label:*duplicate -label:duplicate -label:invalid -label:*as-designed -label:error-telemetry -label:verification-steps-needed -label:verification-found"
},
{
"kind": 1,
"language": "markdown",
"value": "# Release Notes",
"editable": true
"value": "# Release Notes"
},
{
"kind": 2,
"language": "github-issues",
"value": "repo:microsoft/vscode $MILESTONE is:issue is:closed label:feature-request -label:on-release-notes",
"editable": true
"value": "repo:microsoft/vscode $MILESTONE $MINE is:issue is:closed label:feature-request -label:on-release-notes"
}
]
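The endgame notebooks above follow the github-issues notebook convention: one cell defines macros such as $REPOS, $MILESTONE and $MINE, and later query cells splice those macros into their search strings. Below is a rough TypeScript sketch of that substitution, simplified from whatever the extension actually does.

// Simplified macro expansion for github-issues style queries (illustrative).
const macros: Record<string, string> = {
  '$REPOS': 'repo:microsoft/vscode repo:microsoft/vscode-internalbacklog',
  '$MILESTONE': 'milestone:"April 2021"',
  '$MINE': 'assignee:@me',
};

function expand(query: string): string {
  // Replace each known macro token; anything undefined is left untouched.
  return query.replace(/\$[A-Z]+/g, token => macros[token] ?? token);
}

console.log(expand('$REPOS $MILESTONE $MINE is:pr is:open'));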

View File

@@ -8,7 +8,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"February 2021\"",
"value": "// list of repos we work in\n$repos=repo:microsoft/vscode repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks repo:microsoft/vscode-internalbacklog\n\n// current milestone name\n$milestone=milestone:\"April 2021\"",
"editable": true
},
{
@@ -21,7 +21,7 @@
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone assignee:@me is:open",
"editable": false
"editable": true
},
{
"kind": 1,
@@ -81,7 +81,7 @@
"kind": 2,
"language": "github-issues",
"value": "$repos assignee:@me is:open milestone:\"Backlog Candidates\"",
"editable": false
"editable": true
},
{
"kind": 1,
@@ -111,6 +111,6 @@
"kind": 2,
"language": "github-issues",
"value": "$repos assignee:@me is:open label:\"needs more info\"",
"editable": false
"editable": true
}
]

View File

@@ -41,4 +41,4 @@
"value": "repo:microsoft/vscode is:open assignee:@me label:\"papercut :drop_of_blood:\"",
"editable": true
}
]
]

View File

@@ -14,7 +14,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"January 2021\"",
"value": "$repos=repo:microsoft/vscode repo:microsoft/vscode-internalbacklog repo:microsoft/vscode-remote-release repo:microsoft/vscode-js-debug repo:microsoft/vscode-pull-request-github repo:microsoft/vscode-github-issue-notebooks \n$milestone=milestone:\"March 2021\"",
"editable": true
},
{
@@ -38,7 +38,7 @@
{
"kind": 2,
"language": "github-issues",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand",
"value": "$repos $milestone is:closed -assignee:@me label:bug -label:verified -label:*duplicate -author:@me -assignee:@me label:bug -label:verified -author:@me -author:aeschli -author:alexdima -author:alexr00 -author:bpasero -author:chrisdias -author:chrmarti -author:connor4312 -author:dbaeumer -author:deepak1556 -author:eamodio -author:egamma -author:gregvanl -author:isidorn -author:JacksonKearl -author:joaomoreno -author:jrieken -author:lramos15 -author:lszomoru -author:meganrogge -author:misolori -author:mjbvz -author:rebornix -author:RMacfarlane -author:roblourens -author:sana-ajani -author:sandy081 -author:sbatten -author:Tyriar -author:weinand -author:rzhao271 -author:kieferrm -author:TylerLeonhardt -author:bamurtaugh",
"editable": false
},
{

View File

@@ -79,5 +79,7 @@
"[typescript]": {
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"typescript.tsc.autoDetect": "off"
"typescript.tsc.autoDetect": "off",
"notebook.experimental.useMarkdownRenderer": true,
"testing.autoRun.mode": "rerun",
}

.vscode/tasks.json vendored (76 changed lines)
View File

@@ -4,10 +4,11 @@
{
"type": "npm",
"script": "watch-clientd",
"label": "Build VS Code Core",
"label": "Core - Build",
"isBackground": true,
"presentation": {
"reveal": "never"
"reveal": "never",
"group": "buildWatchers"
},
"problemMatcher": {
"owner": "typescript",
@@ -30,10 +31,11 @@
{
"type": "npm",
"script": "watch-extensionsd",
"label": "Build VS Code Extensions",
"label": "Ext - Build",
"isBackground": true,
"presentation": {
"reveal": "never"
"reveal": "never",
"group": "buildWatchers"
},
"problemMatcher": {
"owner": "typescript",
@@ -54,10 +56,38 @@
}
},
{
"label": "Build VS Code",
"type": "npm",
"script": "watch-extension-mediad",
"label": "Ext Media - Build",
"isBackground": true,
"presentation": {
"reveal": "never",
"group": "buildWatchers"
},
"problemMatcher": {
"owner": "typescript",
"applyTo": "closedDocuments",
"fileLocation": [
"absolute"
],
"pattern": {
"regexp": "Error: ([^(]+)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\): (.*)$",
"file": 1,
"location": 2,
"message": 3
},
"background": {
"beginsPattern": "Starting compilation",
"endsPattern": "Finished compilation"
}
}
},
{
"label": "VS Code - Build",
"dependsOn": [
"Build VS Code Core",
"Build VS Code Extensions"
"Core - Build",
"Ext - Build",
"Ext Media - Build",
],
"group": {
"kind": "build",
@@ -68,28 +98,42 @@
{
"type": "npm",
"script": "kill-watch-clientd",
"label": "Kill Build VS Code Core",
"label": "Kill Core - Build",
"group": "build",
"presentation": {
"reveal": "never"
"reveal": "never",
"group": "buildKillers"
},
"problemMatcher": "$tsc"
},
{
"type": "npm",
"script": "kill-watch-extensionsd",
"label": "Kill Build VS Code Extensions",
"label": "Kill Ext - Build",
"group": "build",
"presentation": {
"reveal": "never"
"reveal": "never",
"group": "buildKillers"
},
"problemMatcher": "$tsc"
},
{
"label": "Kill Build VS Code",
"type": "npm",
"script": "kill-watch-extension-mediad",
"label": "Kill Ext Media - Build",
"group": "build",
"presentation": {
"reveal": "never",
"group": "buildKillers"
},
"problemMatcher": "$tsc"
},
{
"label": "Kill VS Code - Build",
"dependsOn": [
"Kill Build VS Code Core",
"Kill Build VS Code Extensions"
"Kill Core - Build",
"Kill Ext - Build",
"Kill Ext Media - Build",
],
"group": "build",
"problemMatcher": []
@@ -111,7 +155,7 @@
{
"type": "npm",
"script": "watch-webd",
"label": "Build Web Extensions",
"label": "Web Ext - Build",
"group": "build",
"isBackground": true,
"presentation": {
@@ -138,7 +182,7 @@
{
"type": "npm",
"script": "kill-watch-webd",
"label": "Kill Build Web Extensions",
"label": "Kill Web Ext - Build",
"group": "build",
"presentation": {
"reveal": "never"

View File

@@ -1,3 +1,3 @@
disturl "https://electronjs.org/headers"
target "11.2.2"
target "12.0.7"
runtime "electron"

View File

@@ -1,5 +1,21 @@
# Change Log
## Version 1.30.0
* Release date: June 17, 2021
* Release status: General Availability
* New Notebook Features:
  * Show the book's notebook TOC title in the pinned notebooks view
  * Add new book icon
  * Update Python to 3.8.10
* Query Editor Features:
  * Added a filtering/sorting feature for the query result grid in the query editor and notebooks; it can be invoked from the column headers. Note that this feature is only available when preview features are enabled
  * Added a status bar item to show a summary of the selected cells when multiple numeric values are selected
* Extension Updates:
  * SQL Database Projects
  * Machine Learning
* Bug Fixes
  * Fix WYSIWYG table cells adding a new line inside the cell
## Version 1.29.0
* Release date: May 19, 2021
* Release status: General Availability
@@ -587,7 +603,7 @@ The May release is focused on stabilization and bug fixes leading up to the Buil
* Announcing **Redgate SQL Search** extension available in Extension Manager
* Community Localization available for 10 languages: **German, Spanish, French, Italian, Japanese, Korean, Portuguese, Russian, Simplified Chinese and Traditional Chinese!**
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement)
* Reduced telemetry collection, improved [opt-out](https://github.com/Microsoft/azuredatastudio/wiki/How-to-Disable-Telemetry-Reporting) experience and in-product links to [Privacy Statement](https://privacy.microsoft.com/privacystatement)
* Extension Manager has improved Marketplace experience to easily discover community extensions
* SQL Agent extension Jobs and Job History view improvement
* Updates for **whoisactive** and **Server Reports** extensions

View File

@@ -65,7 +65,7 @@ This project has adopted the [Microsoft Open Source Code of Conduct](https://ope
Azure Data Studio is localized into 10 languages: French, Italian, German, Spanish, Simplified Chinese, Traditional Chinese, Japanese, Korean, Russian, and Portuguese (Brazil). The language packs are available in the Extension Manager marketplace. Simply search for the specific language in the extension marketplace and install it. Once you install the selected language, Azure Data Studio will prompt you to restart with the new language.
## Privacy Statement
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/en-us/privacystatement) describes the privacy statement of this software.
The [Microsoft Enterprise and Developer Privacy Statement](https://privacy.microsoft.com/privacystatement) describes the privacy statement of this software.
## Contributions and "Thank You"
We would like to thank all our users who raised issues, and in particular the following users who helped contribute fixes:
@@ -131,10 +131,10 @@ Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the [Source EULA](LICENSE.txt).
[win-user]: https://go.microsoft.com/fwlink/?linkid=2163435
[win-system]: https://go.microsoft.com/fwlink/?linkid=2163531
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2163529
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2163528
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2163530
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2163437
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2163436
[win-user]: https://go.microsoft.com/fwlink/?linkid=2165736
[win-system]: https://go.microsoft.com/fwlink/?linkid=2165737
[win-zip]: https://go.microsoft.com/fwlink/?linkid=2165838
[osx-zip]: https://go.microsoft.com/fwlink/?linkid=2165942
[linux-zip]: https://go.microsoft.com/fwlink/?linkid=2165841
[linux-rpm]: https://go.microsoft.com/fwlink/?linkid=2165842
[linux-deb]: https://go.microsoft.com/fwlink/?linkid=2165738

SECURITY.md (new file, 41 lines)
View File

@@ -0,0 +1,41 @@
<!-- BEGIN MICROSOFT SECURITY.MD V0.0.5 BLOCK -->
## Security
Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://docs.microsoft.com/en-us/previous-versions/tn-archive/cc751383(v=technet.10)), please report it to us as described below.
## Reporting Security Issues
**Please do not report security vulnerabilities through public GitHub issues.**
Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).
You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
* Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
* Full paths of source file(s) related to the manifestation of the issue
* The location of the affected source code (tag/branch/commit or direct URL)
* Any special configuration required to reproduce the issue
* Step-by-step instructions to reproduce the issue
* Proof-of-concept or exploit code (if possible)
* Impact of the issue, including how an attacker might exploit the issue
This information will help us triage your report more quickly.
If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
## Preferred Languages
We prefer all communications to be in English.
## Policy
Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).
<!-- END MICROSOFT SECURITY.MD BLOCK -->

View File

@@ -12,7 +12,7 @@ expressly granted herein, whether by implication, estoppel or otherwise.
angular2-grid: https://github.com/BTMorton/angular2-grid
angular2-slickgrid: https://github.com/Microsoft/angular2-slickgrid
applicationinsights: https://github.com/Microsoft/ApplicationInsights-node.js
axios: https://github.com/axios/axios
axios: https://github.com/axios/axios
bootstrap: https://github.com/twbs/bootstrap
chart.js: https://github.com/Timer/chartjs
chokidar: https://github.com/paulmillr/chokidar

View File

@@ -1 +1 @@
2021-04-07T00:04:17.775Z
2021-04-07T03:52:18.011Z

View File

@@ -104,13 +104,11 @@ kerberos/build/**
# END SQL Modules
vscode-nsfw/binding.gyp
vscode-nsfw/build/**
vscode-nsfw/src/**
vscode-nsfw/openpa/**
vscode-nsfw/includes/**
!vscode-nsfw/build/Release/*.node
!vscode-nsfw/**/*.a
nsfw/binding.gyp
nsfw/build/**
nsfw/src/**
nsfw/includes/**
!nsfw/build/Release/*.node
vsda/build/**
vsda/ci/**

View File

@@ -83,7 +83,7 @@ async function main() {
console.log('Asset:', JSON.stringify(asset, null, ' '));
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
await (0, retry_1.retry)(() => scripts.storedProcedure('createAsset').execute('', [commit, asset, true]));
}
main().then(() => {
console.log('Asset successfully created');
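The change from retry_1.retry(...) to (0, retry_1.retry)(...) in this and the following compiled scripts is an artifact of newer TypeScript emit: the (0, fn) indirection calls the imported function without passing the module namespace object as this, and is otherwise behaviour-identical. For context, a retry helper of the kind these scripts import typically looks something like the sketch below (attempt count and backoff are assumptions, not the actual contents of the repository's retry module).

// retry.ts: minimal sketch of a retry helper; the real implementation may differ.
export async function retry<T>(fn: () => Promise<T>, attempts: number = 5): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err;
      // Simple linear backoff between attempts (assumed policy).
      await new Promise(resolve => setTimeout(resolve, 1000 * (i + 1)));
    }
  }
  throw lastError;
}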

View File

@@ -40,7 +40,7 @@ async function main() {
};
const client = new cosmos_1.CosmosClient({ endpoint: process.env['AZURE_DOCUMENTDB_ENDPOINT'], key: process.env['AZURE_DOCUMENTDB_MASTERKEY'] });
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
await (0, retry_1.retry)(() => scripts.storedProcedure('createBuild').execute('', [Object.assign(Object.assign({}, build), { _partitionKey: '' })]));
}
main().then(() => {
console.log('Build successfully created');

View File

@@ -39,7 +39,7 @@ async function publish(commit, files) {
.withFilter(new azure.ExponentialRetryPolicyFilter(20));
await assertContainer(blobService, commit);
for (const file of files) {
const blobName = path_1.basename(file);
const blobName = (0, path_1.basename)(file);
const blobExists = await doesBlobExist(blobService, commit, blobName);
if (blobExists) {
console.log(`Blob ${commit}, ${blobName} already exists, not publishing again.`);
@@ -58,7 +58,7 @@ function main() {
}
const opts = minimist(process.argv.slice(2));
const [directory] = opts._;
const files = fileNames.map(fileName => path_1.join(directory, fileName));
const files = fileNames.map(fileName => (0, path_1.join)(directory, fileName));
publish(commit, files).catch(err => {
console.error(err);
process.exit(1);

View File

@@ -39,7 +39,7 @@ async function main() {
}
console.log(`Releasing build ${commit}...`);
const scripts = client.database('builds').container(quality).scripts;
await retry_1.retry(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
await (0, retry_1.retry)(() => scripts.storedProcedure('releaseBuild').execute('', [commit]));
}
main().then(() => {
console.log('Build successfully released');

View File

@@ -62,7 +62,7 @@ async function sync(commit, quality) {
await new Promise((c, e) => readStream.pipe(writeStream).on('finish', c).on('error', e));
log(` Updating build in DB...`);
const mooncakeUrl = `${process.env['MOONCAKE_CDN_URL']}${blobPath}`;
await retry_1.retry(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
await (0, retry_1.retry)(() => container.scripts.storedProcedure('setAssetMooncakeUrl')
.execute('', [commit, asset.platform, asset.type, mooncakeUrl]));
log(` Done ✔️`);
}

View File

@@ -8,5 +8,11 @@
<true/>
<key>com.apple.security.cs.allow-dyld-environment-variables</key>
<true/>
<key>com.apple.security.device.audio-input</key>
<true/>
<key>com.apple.security.device.camera</key>
<true/>
<key>com.apple.security.automation.apple-events</key>
<true/>
</dict>
</plist>

View File

@@ -0,0 +1,129 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "14.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
displayName: Prepare tooling
- script: |
set -e
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
pushd build \
&& yarn \
&& npm install -g typescript \
&& tsc azure-pipelines/common/createAsset.ts \
&& popd
displayName: Restore modules for just build folder and compile it
- download: current
artifact: vscode-darwin-$(VSCODE_ARCH)
displayName: Download $(VSCODE_ARCH) artifact
- script: |
set -e
unzip $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
mv $(Pipeline.Workspace)/vscode-darwin-$(VSCODE_ARCH)/VSCode-darwin-$(VSCODE_ARCH).zip $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Unzip & move
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Codesign
- script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
displayName: Verify start after signing (export configuration)
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'))
- script: |
set -e
# For legacy purposes, arch for x64 is just 'darwin'
case $VSCODE_ARCH in
x64) ASSET_ID="darwin" ;;
arm64) ASSET_ID="darwin-arm64" ;;
universal) ASSET_ID="darwin-universal" ;;
esac
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
node build/azure-pipelines/common/createAsset.js \
"$ASSET_ID" \
archive \
"VSCode-$ASSET_ID.zip" \
../VSCode-darwin-$(VSCODE_ARCH).zip
displayName: Publish Clients
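The publish step in this new signing template reproduces the arch-to-asset-id mapping that previously lived in `publish.sh` (shortened later in this compare): for historical reasons the x64 build publishes under the bare `darwin` id. A small TypeScript sketch of the same mapping, shown only to make the naming explicit (the helper itself is hypothetical):

```ts
// Hypothetical helper mirroring the case statement in the publish step above.
type DarwinArch = 'x64' | 'arm64' | 'universal';

const darwinAssetId: Record<DarwinArch, string> = {
	x64: 'darwin',           // legacy: the plain 'darwin' id means x64
	arm64: 'darwin-arm64',
	universal: 'darwin-universal'
};

// x64 publishes VSCode-darwin.zip, arm64 publishes VSCode-darwin-arm64.zip, etc.
console.log(`VSCode-${darwinAssetId.x64}.zip`, `VSCode-${darwinAssetId.arm64}.zip`);
```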


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
@@ -22,6 +22,8 @@ steps:
displayName: Extract compilation output
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
# Set up the credentials to retrieve distro repo and setup git persona
# to create a merge commit for when we merge distro into oss
- script: |
set -e
cat << EOF > ~/.netrc
@@ -71,6 +73,7 @@ steps:
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
@@ -80,6 +83,7 @@ steps:
set -e
export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp)
export npm_config_build_from_source=true
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
for i in {1..3}; do # try 3 times, for Terrapin
@@ -104,30 +108,7 @@ steps:
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
- script: |
set -e
export npm_config_arch=$(VSCODE_ARCH)
export npm_config_node_gyp=$(which node-gyp)
export npm_config_build_from_source=true
export SDKROOT=/Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX11.0.sdk
ls /Applications/Xcode_12.2.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/
yarn electron-rebuild
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
cd ./node_modules/keytar
node-gyp rebuild
displayName: Rebuild native modules for ARM64
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'arm64'))
- download: current
artifact: vscode-darwin-x64
displayName: Download x64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- download: current
artifact: vscode-darwin-arm64
displayName: Download arm64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
# This script brings in the right resources (images, icons, etc) based on the quality (insiders, stable, exploration)
- script: |
set -e
node build/azure-pipelines/mixin
@@ -137,7 +118,7 @@ steps:
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-darwin-$(VSCODE_ARCH)-min-ci
displayName: Build
displayName: Build client
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'))
- script: |
@@ -149,14 +130,6 @@ steps:
displayName: Build Server
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
set -e
unzip $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip -d $(agent.builddirectory)/vscode-x64
unzip $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/vscode-arm64
DEBUG=* node build/darwin/create-universal-app.js
displayName: Create Universal App
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
@@ -164,6 +137,29 @@ steps:
displayName: Download Electron and Playwright
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'universal'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- download: current
artifact: vscode-darwin-x64
displayName: Download x64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- download: current
artifact: vscode-darwin-arm64
displayName: Download arm64 artifact
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
- script: |
set -e
cp $(Pipeline.Workspace)/vscode-darwin-x64/VSCode-darwin-x64.zip $(agent.builddirectory)/VSCode-darwin-x64.zip
cp $(Pipeline.Workspace)/vscode-darwin-arm64/VSCode-darwin-arm64.zip $(agent.builddirectory)/VSCode-darwin-arm64.zip
unzip $(agent.builddirectory)/VSCode-darwin-x64.zip -d $(agent.builddirectory)/VSCode-darwin-x64
unzip $(agent.builddirectory)/VSCode-darwin-arm64.zip -d $(agent.builddirectory)/VSCode-darwin-arm64
DEBUG=* node build/darwin/create-universal-app.js
displayName: Create Universal App
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'universal'))
# Setting hardened entitlements is a requirement for:
# * Apple notarization
# * Running tests on Big Sur (because Big Sur has additional security precautions)
- script: |
set -e
security create-keychain -p pwd $(agent.tempdirectory)/buildagent.keychain
@@ -172,22 +168,21 @@ steps:
echo "$(macos-developer-certificate)" | base64 -D > $(agent.tempdirectory)/cert.p12
security import $(agent.tempdirectory)/cert.p12 -k $(agent.tempdirectory)/buildagent.keychain -P "$(macos-developer-certificate-key)" -T /usr/bin/codesign
security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k pwd $(agent.tempdirectory)/buildagent.keychain
VSCODE_ARCH="$(VSCODE_ARCH)" DEBUG=electron-osx-sign* node build/darwin/sign.js
VSCODE_ARCH=$(VSCODE_ARCH) DEBUG=electron-osx-sign* node build/darwin/sign.js
displayName: Set Hardened Entitlements
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
timeoutInMinutes: 5
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
yarn test-browser --build --browser chromium --browser webkit --browser firefox --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
timeoutInMinutes: 5
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -215,7 +210,7 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-darwin" \
./resources/server/test/test-web-integration.sh --browser webkit
displayName: Run integration tests (Browser)
timeoutInMinutes: 5
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -226,7 +221,7 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-darwin" \
./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 5
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -283,91 +278,19 @@ steps:
displayName: Archive build
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppDeveloperSign",
"parameters": [
{
"parameterName": "Hardening",
"parameterValue": "--options=runtime"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Codesign
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
zip -d $(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH).zip "*.pkg"
displayName: Clean
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
BUNDLE_IDENTIFIER=$(node -p "require(\"$APP_ROOT/$APP_NAME/Contents/Resources/app/product.json\").darwinBundleIdentifier")
echo "##vso[task.setvariable variable=BundleIdentifier]$BUNDLE_IDENTIFIER"
displayName: Export bundle identifier
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: "ESRP CodeSign"
FolderPath: "$(agent.builddirectory)"
Pattern: "VSCode-darwin-$(VSCODE_ARCH).zip"
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-401337-Apple",
"operationSetCode": "MacAppNotarize",
"parameters": [
{
"parameterName": "BundleId",
"parameterValue": "$(BundleIdentifier)"
}
],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 60
displayName: Notarization
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
APP_ROOT=$(agent.builddirectory)/VSCode-darwin-$(VSCODE_ARCH)
APP_NAME="`ls $APP_ROOT | head -n 1`"
"$APP_ROOT/$APP_NAME/Contents/Resources/app/bin/code" --export-default-configuration=.build
displayName: Verify start after signing (export configuration)
condition: and(succeeded(), ne(variables['VSCODE_ARCH'], 'arm64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
AZURE_STORAGE_ACCESS_KEY="$(ticino-storage-key)" \
AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
VSCODE_ARCH="$(VSCODE_ARCH)" \
./build/azure-pipelines/darwin/publish.sh
displayName: Publish
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
VSCODE_ARCH="$(VSCODE_ARCH)" ./build/azure-pipelines/darwin/publish-server.sh
displayName: Publish Servers
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/VSCode-darwin-$(VSCODE_ARCH).zip
artifact: vscode-darwin-$(VSCODE_ARCH)
displayName: Publish archive
displayName: Publish client archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- publish: $(Agent.BuildDirectory)/vscode-server-darwin.zip
@@ -385,9 +308,5 @@ steps:
VSCODE_ARCH="$(VSCODE_ARCH)" \
yarn gulp upload-vscode-configuration
displayName: Upload configuration (for Bing settings search)
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), ne(variables['VSCODE_PUBLISH'], 'false'))
continueOnError: true
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'))
continueOnError: true


@@ -1,20 +1,6 @@
#!/usr/bin/env bash
set -e
# Publish DEB
case $VSCODE_ARCH in
x64) ASSET_ID="darwin" ;;
arm64) ASSET_ID="darwin-arm64" ;;
universal) ASSET_ID="darwin-universal" ;;
esac
# publish the build
node build/azure-pipelines/common/createAsset.js \
"$ASSET_ID" \
archive \
"VSCode-$ASSET_ID.zip" \
../VSCode-darwin-$VSCODE_ARCH.zip
if [ "$VSCODE_ARCH" == "x64" ]; then
# package Remote Extension Host
pushd .. && mv vscode-reh-darwin vscode-server-darwin && zip -Xry vscode-server-darwin.zip vscode-server-darwin && popd


@@ -3,39 +3,39 @@ pool:
trigger:
branches:
include: ['main', 'release/*']
include: ["main", "release/*"]
pr:
branches:
include: ['main', 'release/*']
include: ["main", "release/*"]
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: AzureKeyVault@1
displayName: 'Azure Key Vault: Get Secrets'
inputs:
azureSubscription: 'azuredatastudio-adointegration'
KeyVaultName: ado-secrets
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
inputs:
azureSubscription: "vscode-builds-subscription"
KeyVaultName: vscode
- script: |
set -e
cat << EOF > ~/.netrc
machine github.com
login azuredatastudio
password $(github-distro-mixin-password)
EOF
cat << EOF > ~/.netrc
machine github.com
login vscode
password $(github-distro-mixin-password)
EOF
git config user.email "sqltools@service.microsoft.com"
git config user.name "AzureDataStudio"
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git remote add distro "https://github.com/$(VSCODE_MIXIN_REPO).git"
git fetch distro
git remote add distro "https://github.com/$VSCODE_MIXIN_REPO.git"
git fetch distro
# Push main branch into oss/master
git push distro origin/main:refs/heads/oss/master
# Push main branch into oss/main
git push distro origin/main:refs/heads/oss/main
# Push every release branch into oss/release
git for-each-ref --format="%(refname:short)" refs/remotes/origin/release/* | sed 's/^origin\/\(.*\)$/\0:refs\/heads\/oss\/\1/' | xargs git push distro


@@ -1,10 +1,14 @@
#Download base image ubuntu 18.04
FROM ubuntu:18.04
#Download base image ubuntu 21.04
FROM ubuntu:21.04
ENV TZ=America/Los_Angeles
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Update Software repository
RUN apt-get update
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus libgtk-3-0
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++ python3-dev python3-pip
ADD ./ /opt/ads-server


@@ -11,7 +11,7 @@ pr:
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: AzureKeyVault@1
displayName: "Azure Key Vault: Get Secrets"
@@ -31,10 +31,10 @@ steps:
git config user.email "vscode@microsoft.com"
git config user.name "VSCode"
git checkout origin/electron-11.x.y
git merge origin/master
git checkout origin/electron-12.x.y
git merge origin/main
# Push master branch into exploration branch
git push origin HEAD:electron-11.x.y
# Push main branch into exploration branch
git push origin HEAD:electron-12.x.y
displayName: Sync & Merge Exploration


@@ -14,22 +14,4 @@ TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
# # Publish Remote Extension Host
# LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
# SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
# SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
# SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
# rm -rf $ROOT/azuredatastudio-server-*.tar.*
# (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME_WEB="azuredatastudio-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH_WEB="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME_WEB"
rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv vscode-reh-web-linux-x64 $SERVER_BUILD_NAME_WEB && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH_WEB $SERVER_BUILD_NAME_WEB)
node build/azure-pipelines/common/copyArtifacts.js


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -69,6 +69,7 @@ steps:
displayName: Extract node_modules cache
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
@@ -132,7 +133,3 @@ steps:
artifact: vscode-server-linux-alpine-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
continueOnError: true


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -61,12 +61,6 @@ steps:
- script: |
set -e
npm install -g node-gyp@latest
node-gyp --version
displayName: Update node-gyp
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['VSCODE_ARCH'], 'x64'))
- script: |
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
@@ -85,7 +79,6 @@ steps:
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
fi
for i in {1..3}; do # try 3 times, for Terrapin
@@ -96,10 +89,6 @@ steps:
fi
echo "Yarn failed $i, trying again..."
done
# remove once https://github.com/prebuild/prebuild-install/pull/140 is merged and found in keytar
cd ./node_modules/keytar
npx node-gyp rebuild
env:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
@@ -144,14 +133,14 @@ steps:
set -e
DISPLAY=:10 ./scripts/test.sh --build --tfs "Unit Tests"
displayName: Run unit tests (Electron)
timeoutInMinutes: 5
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
set -e
DISPLAY=:10 yarn test-browser --build --browser chromium --tfs "Browser Unit Tests"
displayName: Run unit tests (Browser)
timeoutInMinutes: 5
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -180,7 +169,7 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-web-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-web-integration.sh --browser chromium
displayName: Run integration tests (Browser)
timeoutInMinutes: 5
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- script: |
@@ -192,7 +181,7 @@ steps:
VSCODE_REMOTE_SERVER_PATH="$(agent.builddirectory)/vscode-reh-linux-$(VSCODE_ARCH)" \
DISPLAY=:10 ./resources/server/test/test-remote-integration.sh
displayName: Run remote integration tests (Electron)
timeoutInMinutes: 5
timeoutInMinutes: 7
condition: and(succeeded(), eq(variables['VSCODE_ARCH'], 'x64'), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
- task: PublishPipelineArtifact@0
@@ -286,7 +275,3 @@ steps:
artifactName: "snap-$(VSCODE_ARCH)"
targetPath: .build/linux/snap-tarball
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
continueOnError: true


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -48,7 +48,7 @@ steps:
x64) SNAPCRAFT_TARGET_ARGS="" ;;
*) SNAPCRAFT_TARGET_ARGS="--target-arch $(VSCODE_ARCH)" ;;
esac
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft snap $SNAPCRAFT_TARGET_ARGS --output "$SNAP_PATH")
(cd $SNAP_ROOT/code-* && sudo --preserve-env snapcraft prime $SNAPCRAFT_TARGET_ARGS && snap pack prime --compression=lzo --filename="$SNAP_PATH")
# Publish snap package
AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \


@@ -94,8 +94,6 @@ steps:
- script: |
set -e
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-web-min-ci
yarn gulp vscode-reh-web-linux-x64-min
displayName: Build
env:
VSCODE_MIXIN_PASSWORD: $(github-distro-mixin-password)
@@ -224,19 +222,6 @@ steps:
displayName: 'Signing Extensions and Langpacks'
condition: and(succeeded(), eq(variables['signed'], true))
# - script: |
# set -e
# cd ./extensions/mssql/node_modules/@microsoft/ads-kerberos
# # npx node-gyp rebuild
# yarn install
# displayName: Recompile native node modules
# - script: |
# set -e
# VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
# yarn gulp vscode-reh-web-linux-x64-min
# displayName: Build web server
- script: |
set -e
./build/azure-pipelines/linux/createDrop.sh


@@ -5,7 +5,7 @@ schedules:
displayName: Mon-Fri at 7:00
branches:
include:
- master
- main
parameters:
- name: VSCODE_QUALITY
@@ -254,6 +254,15 @@ stages:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin.yml
- ${{ if ne(variables['VSCODE_PUBLISH'], 'false') }}:
- job: macOSSign
dependsOn:
- macOS
timeoutInMinutes: 90
variables:
VSCODE_ARCH: x64
steps:
- template: darwin/product-build-darwin-sign.yml
- ${{ if and(eq(variables['VSCODE_CIBUILD'], false), eq(parameters.VSCODE_BUILD_MACOS_ARM64, true)) }}:
- job: macOSARM64
@@ -262,6 +271,15 @@ stages:
VSCODE_ARCH: arm64
steps:
- template: darwin/product-build-darwin.yml
- ${{ if ne(variables['VSCODE_PUBLISH'], 'false') }}:
- job: macOSARM64Sign
dependsOn:
- macOSARM64
timeoutInMinutes: 90
variables:
VSCODE_ARCH: arm64
steps:
- template: darwin/product-build-darwin-sign.yml
- ${{ if eq(variables['VSCODE_BUILD_MACOS_UNIVERSAL'], true) }}:
- job: macOSUniversal
@@ -273,6 +291,15 @@ stages:
VSCODE_ARCH: universal
steps:
- template: darwin/product-build-darwin.yml
- ${{ if ne(variables['VSCODE_PUBLISH'], 'false') }}:
- job: macOSUniversalSign
dependsOn:
- macOSUniversal
timeoutInMinutes: 90
variables:
VSCODE_ARCH: universal
steps:
- template: darwin/product-build-darwin-sign.yml
- ${{ if and(eq(variables['VSCODE_PUBLISH'], true), eq(parameters.VSCODE_COMPILE_ONLY, false)) }}:
- stage: Mooncake


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -30,39 +30,41 @@ steps:
git pull --no-rebase https://github.com/$(VSCODE_MIXIN_REPO).git $(node -p "require('./package.json').distro")
displayName: Merge distro
- script: |
npx https://aka.ms/enablesecurefeed standAlone
displayName: Switch to Terrapin packages
timeoutInMinutes: 5
condition: and(succeeded(), eq(variables['ENABLE_TERRAPIN'], 'true'))
- script: |
mkdir -p .build
echo -n $(VSCODE_ARCH) > .build/arch
echo -n $ENABLE_TERRAPIN > .build/terrapin
node build/azure-pipelines/common/computeNodeModulesCacheKey.js $VSCODE_ARCH $ENABLE_TERRAPIN > .build/yarnlockhash
displayName: Prepare yarn cache flags
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# using `genericNodeModules` instead of `nodeModules` here to avoid sharing the cache with builds running inside containers
- task: Cache@2
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
key: 'genericNodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore node_modules cache
- script: |
set -e
export npm_config_arch=$(NPM_ARCH)
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
if [ -z "$CC" ] || [ -z "$CXX" ]; then
export CC=$(which gcc-5)
export CXX=$(which g++-5)
fi
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
if [ "$VSCODE_ARCH" == "x64" ]; then
export VSCODE_REMOTE_CC=$(which gcc-4.8)
export VSCODE_REMOTE_CXX=$(which g++-4.8)
export VSCODE_REMOTE_NODE_GYP=$(which node-gyp)
fi
- script: |
set -e
sudo apt update -y
sudo apt install -y build-essential pkg-config libx11-dev libx11-xcb-dev libxkbfile-dev libsecret-1-dev libnotify-bin
displayName: Install build tools
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- script: |
set -e
for i in {1..3}; do # try 3 times, for Terrapin
yarn --frozen-lockfile && break
if [ $i -eq 3 ]; then
@@ -75,14 +77,15 @@ steps:
ELECTRON_SKIP_BINARY_DOWNLOAD: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
displayName: Install dependencies
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
inputs:
keyfile: ".build/arch, .build/terrapin, build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock"
targetfolder: "**/node_modules, !**/node_modules/**/node_modules"
vstsFeed: "npm-vscode"
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
# Mixin must run before optimize, because the CSS loader will inline small SVGs
- script: |
@@ -92,12 +95,8 @@ steps:
- script: |
set -e
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
displayName: Compile
yarn npm-run-all -lp core-ci extensions-ci hygiene eslint valid-layers-check
displayName: Compile & Hygiene
- script: |
set -e
@@ -106,6 +105,19 @@ steps:
displayName: Upload sourcemaps
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
./build/azure-pipelines/common/extract-telemetry.sh
displayName: Extract Telemetry
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
AZURE_WEBVIEW_STORAGE_ACCESS_KEY="$(vscode-webview-storage-key)" \
./build/azure-pipelines/common/publish-webview.sh
displayName: Publish Webview
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'))
- script: |
set -e
VERSION=`node -p "require(\"./package.json\").version"`
@@ -125,3 +137,15 @@ steps:
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
artifactName: Compilation
displayName: Publish compilation artifact
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn download-builtin-extensions-cg
displayName: Built-in extensions component details
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
inputs:
sourceScanPath: $(Build.SourcesDirectory)
continueOnError: true
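Among the changes in this compile pipeline, the node_modules caching moves from the 1ES RestoreCache/SaveCache tasks to the built-in Cache@2 task, keyed on a `.build/yarnlockhash` file produced by `computeNodeModulesCacheKey.js`. A minimal sketch of what such a cache-key script could look like; the exact file list and hash algorithm here are assumptions, not the repository's actual implementation:

```ts
// compute-cache-key.ts — hypothetical sketch of a yarn.lock cache-key script
import * as crypto from 'crypto';
import * as fs from 'fs';
import * as path from 'path';

const root = path.resolve(__dirname, '..', '..');
const hash = crypto.createHash('sha256');

// Extra discriminators passed on the command line, e.g. target arch and the Terrapin flag,
// so builds with different settings never share a node_modules cache.
for (const arg of process.argv.slice(2)) {
	hash.update(arg);
}

// Hash the lockfiles so the cache is invalidated whenever dependencies change.
for (const lockfile of ['yarn.lock', 'remote/yarn.lock']) { // assumed file list
	hash.update(fs.readFileSync(path.join(root, lockfile)));
}

process.stdout.write(hash.digest('hex'));
```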


@@ -9,7 +9,7 @@ pr: none
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -61,7 +61,7 @@ steps:
TAG_VERSION=$(git describe --tags `git rev-list --tags --max-count=1`)
CHANNEL="G1C14HJ2F"
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame master, please open this link, examine changes and create a PR:"
MESSAGE="DefinitelyTyped/DefinitelyTyped#vscode-types-$TAG_VERSION created. Endgame champion, please open this link, examine changes and create a PR:"
LINK="https://github.com/DefinitelyTyped/DefinitelyTyped/compare/vscode-types-$TAG_VERSION?quick_pull=1&body=Updating%20VS%20Code%20Extension%20API.%20See%20https%3A%2F%2Fgithub.com%2Fmicrosoft%2Fvscode%2Fissues%2F70175%20for%20details."
MESSAGE2="[@eamodio, @jrieken, @kmaetzel, @egamma]. Please review and merge PR to publish @types/vscode."


@@ -60,7 +60,7 @@ function getNewFileHeader(tag) {
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` * See https://github.com/Microsoft/vscode/blob/main/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,


@@ -72,7 +72,7 @@ function getNewFileHeader(tag: string) {
`/*---------------------------------------------------------------------------------------------`,
` * Copyright (c) Microsoft Corporation. All rights reserved.`,
` * Licensed under the Source EULA.`,
` * See https://github.com/Microsoft/vscode/blob/master/LICENSE.txt for license information.`,
` * See https://github.com/Microsoft/vscode/blob/main/LICENSE.txt for license information.`,
` *--------------------------------------------------------------------------------------------*/`,
``,
`/**`,


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.x"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:


@@ -15,7 +15,7 @@ jobs:
echo "##vso[build.addbuildtag]$(VSCODE_QUALITY)"
displayName: Add Quality Build Tag
- template: sql-product-compile.yml
timeoutInMinutes: 90
timeoutInMinutes: 120
- job: macOS
condition: and(succeeded(), eq(variables['VSCODE_BUILD_MACOS'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
@@ -47,31 +47,7 @@ jobs:
steps:
- template: linux/sql-product-build-linux.yml
parameters:
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azdata", "azurecore", "cms", "dacpac", "import", "schema-compare", "notebook", "resource-deployment", "machine-learning", "sql-database-projects", "data-workspace"]
timeoutInMinutes: 90
- job: LinuxWeb
condition: and(succeeded(), eq(variables['VSCODE_BUILD_WEB'], 'true'), ne(variables['VSCODE_QUALITY'], 'saw'))
pool:
vmImage: 'Ubuntu-18.04'
container: linux-x64
variables:
VSCODE_ARCH: x64
dependsOn:
- Compile
steps:
- template: web/sql-product-build-web.yml
timeoutInMinutes: 90
- job: Docker
condition: and(succeeded(), eq(variables['VSCODE_BUILD_DOCKER'], 'true'))
pool:
vmImage: 'Ubuntu-18.04'
container: linux-x64
dependsOn:
- Linux
steps:
- template: docker/sql-product-build-docker.yml
extensionsToUnitTest: ["admin-tool-ext-win", "agent", "azcli", "azdata", "azurecore", "cms", "dacpac", "data-workspace", "import", "machine-learning", "notebook", "resource-deployment", "schema-compare", "sql-database-projects"]
timeoutInMinutes: 90
- job: Windows
@@ -102,10 +78,8 @@ jobs:
dependsOn:
- macOS
- Linux
# - Docker
- Windows
- Windows_Test
- LinuxWeb
- macOS_Signing
steps:
- template: sql-release.yml


@@ -91,8 +91,6 @@ steps:
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-min
displayName: Compile
- script: |


@@ -0,0 +1,29 @@
resources:
containers:
- container: linux-x64
image: sqltoolscontainers.azurecr.io/web-build-image:1
endpoint: ContainerRegistry
jobs:
- job: LinuxWeb
pool:
vmImage: 'Ubuntu-18.04'
container: linux-x64
variables:
VSCODE_ARCH: x64
steps:
- template: web/sql-product-build-web.yml
timeoutInMinutes: 90
- job: Docker
pool:
vmImage: 'Ubuntu-18.04'
container: linux-x64
dependsOn:
- LinuxWeb
steps:
- template: docker/sql-product-build-docker.yml
timeoutInMinutes: 90
trigger: none
pr: none


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:


@@ -2,56 +2,54 @@
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
const path = require('path');
const es = require('event-stream');
Object.defineProperty(exports, "__esModule", { value: true });
const path = require("path");
const es = require("event-stream");
const vfs = require("vinyl-fs");
const util = require("../lib/util");
// @ts-ignore
const deps = require("../lib/dependencies");
const azure = require('gulp-azure-storage');
const vfs = require('vinyl-fs');
const util = require('../lib/util');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
// optionally allow to pass in explicit base/maps to upload
const [, , base, maps] = process.argv;
const fetch = function (base, maps = `${base}/**/*.map`) {
return vfs.src(maps, { base })
.pipe(es.mapSync(f => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
};
function main() {
const sources = [];
// vscode client maps (default)
if (!base) {
const vs = fetch('out-vscode-min'); // client source-maps only
sources.push(vs);
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
sources.push(extensionsOut);
}
// specific client base/maps
else {
sources.push(fetch(base, maps));
}
return es.merge(...sources)
.pipe(es.through(function (data) {
console.log('Uploading Sourcemap', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
function src(base, maps = `${base}/**/*.map`) {
return vfs.src(maps, { base })
.pipe(es.mapSync((f) => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
}
function main() {
const sources = [];
// vscode client maps (default)
if (!base) {
const vs = src('out-vscode-min'); // client source-maps only
sources.push(vs);
const productionDependencies = deps.getProductionDependencies(root);
const productionDependenciesSrc = productionDependencies.map(d => path.relative(root, d.path)).map(d => `./${d}/**/*.map`);
const nodeModules = vfs.src(productionDependenciesSrc, { base: '.' })
.pipe(util.cleanNodeModules(path.join(root, 'build', '.moduleignore')));
sources.push(nodeModules);
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
sources.push(extensionsOut);
}
// specific client base/maps
else {
sources.push(src(base, maps));
}
return es.merge(...sources)
.pipe(es.through(function (data) {
console.log('Uploading Sourcemap', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
}
main();


@@ -0,0 +1,67 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
'use strict';
import * as path from 'path';
import * as es from 'event-stream';
import * as Vinyl from 'vinyl';
import * as vfs from 'vinyl-fs';
import * as util from '../lib/util';
// @ts-ignore
import * as deps from '../lib/dependencies';
const azure = require('gulp-azure-storage');
const root = path.dirname(path.dirname(__dirname));
const commit = util.getVersion(root);
// optionally allow to pass in explicit base/maps to upload
const [, , base, maps] = process.argv;
function src(base: string, maps = `${base}/**/*.map`) {
return vfs.src(maps, { base })
.pipe(es.mapSync((f: Vinyl) => {
f.path = `${f.base}/core/${f.relative}`;
return f;
}));
}
function main() {
const sources = [];
// vscode client maps (default)
if (!base) {
const vs = src('out-vscode-min'); // client source-maps only
sources.push(vs);
const productionDependencies: { name: string, path: string, version: string }[] = deps.getProductionDependencies(root);
const productionDependenciesSrc = productionDependencies.map(d => path.relative(root, d.path)).map(d => `./${d}/**/*.map`);
const nodeModules = vfs.src(productionDependenciesSrc, { base: '.' })
.pipe(util.cleanNodeModules(path.join(root, 'build', '.moduleignore')));
sources.push(nodeModules);
const extensionsOut = vfs.src(['.build/extensions/**/*.js.map', '!**/node_modules/**'], { base: '.build' });
sources.push(extensionsOut);
}
// specific client base/maps
else {
sources.push(src(base, maps));
}
return es.merge(...sources)
.pipe(es.through(function (data: Vinyl) {
console.log('Uploading Sourcemap', data.relative); // debug
this.emit('data', data);
}))
.pipe(azure.upload({
account: process.env.AZURE_STORAGE_ACCOUNT,
key: process.env.AZURE_STORAGE_ACCESS_KEY,
container: 'sourcemaps',
prefix: commit + '/'
}));
}
main();
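The rewritten upload script optionally takes an explicit base directory and map glob from `process.argv`, which is the argument order used by the "Upload sourcemaps (Web)" invocation that appears later in this compare. A hypothetical invocation from another Node script, just to illustrate the arguments and the environment variables the script reads:

```ts
// run-upload.ts — hypothetical wrapper; the script path, argument order, and env var names
// come from this compare, the credential values are placeholders.
import { execFileSync } from 'child_process';

execFileSync('node', [
	'build/azure-pipelines/upload-sourcemaps',
	'out-vscode-web-min',                                        // base
	'out-vscode-web-min/vs/workbench/workbench.web.api.js.map'   // maps (defaults to `${base}/**/*.map`)
], {
	stdio: 'inherit',
	env: {
		...process.env,
		AZURE_STORAGE_ACCOUNT: '<storage account>',
		AZURE_STORAGE_ACCESS_KEY: '<storage key>'
	}
});
```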


@@ -0,0 +1,24 @@
#Download base image ubuntu 21.04
FROM ubuntu:21.04
ENV TZ=America/Los_Angeles
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
# Update Software repository
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y libxkbfile-dev pkg-config libsecret-1-dev libxss1 dbus xvfb libgtk-3-0 \
libkrb5-dev git apt-transport-https ca-certificates curl gnupg-agent software-properties-common \
libnss3 libasound2 make gcc libx11-dev fakeroot rpm libgconf-2-4 libunwind8 g++ python
#docker
RUN curl -fsSL https://download.docker.com/linux/ubuntu/gpg | apt-key add -
RUN apt-key fingerprint 0EBFCD88
RUN add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
RUN apt-get update
RUN apt-get -y install docker-ce docker-ce-cli containerd.io
# This image needs to be built on a linux host; some weird stuff happens and the xvfb service won't start
# if built on a windows host.
ADD ./xvfb.init /etc/init.d/xvfb
RUN chmod +x /etc/init.d/xvfb
RUN update-rc.d xvfb defaults


@@ -0,0 +1,53 @@
#!/bin/bash
#
# /etc/rc.d/init.d/xvfbd
#
# chkconfig: 345 95 28
# description: Starts/Stops X Virtual Framebuffer server
# processname: Xvfb
#
### BEGIN INIT INFO
# Provides: xvfb
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: Start xvfb at boot time
# Description: Enable xvfb provided by daemon.
### END INIT INFO
[ "${NETWORKING}" = "no" ] && exit 0
PROG="/usr/bin/Xvfb"
PROG_OPTIONS=":10 -ac -screen 0 1024x768x24"
PROG_OUTPUT="/tmp/Xvfb.out"
case "$1" in
start)
echo "Starting : X Virtual Frame Buffer "
$PROG $PROG_OPTIONS>>$PROG_OUTPUT 2>&1 &
disown -ar
;;
stop)
echo "Shutting down : X Virtual Frame Buffer"
killproc $PROG
RETVAL=$?
[ $RETVAL -eq 0 ] && /bin/rm -f /var/lock/subsys/Xvfb
/var/run/Xvfb.pid
echo
;;
restart|reload)
$0 stop
$0 start
RETVAL=$?
;;
status)
status Xvfb
RETVAL=$?
;;
*)
echo $"Usage: $0 (start|stop|restart|reload|status)"
exit 1
esac
exit $RETVAL


@@ -0,0 +1,35 @@
#!/usr/bin/env bash
set -e
REPO="$(pwd)"
ROOT="$REPO/.."
# Publish tarball
mkdir -p $REPO/.build/linux/{archive,server}
PLATFORM_LINUX="linux-x64"
BUILDNAME="azuredatastudio-$PLATFORM_LINUX"
BUILD="$ROOT/$BUILDNAME"
TARBALL_FILENAME="azuredatastudio-$PLATFORM_LINUX.tar.gz"
TARBALL_PATH="$REPO/.build/linux/archive/$TARBALL_FILENAME"
rm -rf $ROOT/code-*.tar.*
(cd $ROOT && tar -czf $TARBALL_PATH $BUILDNAME)
# # Publish Remote Extension Host
# LEGACY_SERVER_BUILD_NAME="azuredatastudio-reh-$PLATFORM_LINUX"
# SERVER_BUILD_NAME="azuredatastudio-server-$PLATFORM_LINUX"
# SERVER_TARBALL_FILENAME="azuredatastudio-server-$PLATFORM_LINUX.tar.gz"
# SERVER_TARBALL_PATH="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME"
# rm -rf $ROOT/azuredatastudio-server-*.tar.*
# (cd $ROOT && mv $LEGACY_SERVER_BUILD_NAME $SERVER_BUILD_NAME && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH $SERVER_BUILD_NAME)
# Publish Remote Extension Host (Web)
LEGACY_SERVER_BUILD_NAME_WEB="azuredatastudio-reh-web-$PLATFORM_LINUX"
SERVER_BUILD_NAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web"
SERVER_TARBALL_FILENAME_WEB="azuredatastudio-server-$PLATFORM_LINUX-web.tar.gz"
SERVER_TARBALL_PATH_WEB="$REPO/.build/linux/server/$SERVER_TARBALL_FILENAME_WEB"
rm -rf $ROOT/azuredatastudio-server-*.tar.*
(cd $ROOT && mv vscode-reh-web-linux-x64 $SERVER_BUILD_NAME_WEB && tar --owner=0 --group=0 -czf $SERVER_TARBALL_PATH_WEB $SERVER_BUILD_NAME_WEB)
node build/azure-pipelines/common/copyArtifacts.js


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -60,6 +60,7 @@ steps:
displayName: Extract node_modules cache
- script: |
set -e
npx https://aka.ms/enablesecurefeed standAlone
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))


@@ -1,4 +1,3 @@
steps:
- task: NodeTool@0
inputs:
@@ -13,17 +12,6 @@ steps:
inputs:
azureSubscription: 'ClientToolsInfra_670062 (88d5392f-a34f-4769-b405-f597fc533613)'
KeyVaultName: ado-secrets
SecretsFilter: 'github-distro-mixin-password'
- task: DownloadPipelineArtifact@2
inputs:
artifact: Compilation
displayName: Download compilation output
- script: |
set -e
tar -xzf $(Pipeline.Workspace)/compilation.tar.gz
displayName: Extract compilation output
- script: |
set -e
@@ -44,33 +32,46 @@ steps:
git merge $(node -p "require('./package.json').distro")
displayName: Merge distro
# - task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
# displayName: Restore Cache - Node Modules
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
- script: |
mkdir -p .build
node build/azure-pipelines/common/sql-computeNodeModulesCacheKey.js > .build/yarnlockhash
displayName: Prepare yarn cache key
- task: Cache@2
inputs:
key: 'nodeModules | $(Agent.OS) | .build/yarnlockhash'
path: .build/node_modules_cache
cacheHitVar: NODE_MODULES_RESTORED
displayName: Restore Cache - Node Modules
- script: |
set -e
tar -xzf .build/node_modules_cache/cache.tgz
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules archive
- script: |
set -e
CHILD_CONCURRENCY=1 yarn --frozen-lockfile
displayName: Install dependencies
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
condition: and(succeeded(), ne(variables['NODE_MODULES_RESTORED'], 'true'))
# - task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
# displayName: Save Cache - Node Modules
# inputs:
# keyfile: 'build/.cachesalt, .yarnrc, remote/.yarnrc, **/yarn.lock, !**/node_modules/**/yarn.lock, !**/.*/**/yarn.lock'
# targetfolder: '**/node_modules, !**/node_modules/**/node_modules'
# vstsFeed: 'npm-vscode'
# condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
- script: |
set -e
node build/azure-pipelines/common/listNodeModules.js .build/node_modules_list.txt
mkdir -p .build/node_modules_cache
tar -czf .build/node_modules_cache/cache.tgz --files-from .build/node_modules_list.txt
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Create node_modules archive
# - script: |
# set -e
# yarn postinstall
# displayName: Run postinstall scripts
# condition: and(succeeded(), eq(variables['CacheRestored'], 'true'))
- script: |
set -e
yarn postinstall
displayName: Run postinstall scripts
condition: and(succeeded(), eq(variables['NODE_MODULES_RESTORED'], 'true'))
# Mixin must run before optimize, because the CSS loader will
# inline small SVGs
- script: |
set -e
node build/azure-pipelines/mixin
@@ -78,25 +79,128 @@ steps:
- script: |
set -e
VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
yarn gulp vscode-web-min-ci
displayName: Build
yarn sqllint
yarn gulp hygiene
yarn strict-vscode
yarn valid-layers-check
displayName: Run hygiene, eslint
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'))
# upload only the workbench.web.api.js source maps because
# we just compiled these bits in the previous step and the
# general task to upload source maps has already been run
- script: |
set -e
AZURE_STORAGE_ACCOUNT="$(sourcemap-storage-account)" \
AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
node build/azure-pipelines/upload-sourcemaps out-vscode-web-min out-vscode-web-min/vs/workbench/workbench.web.api.js.map
displayName: Upload sourcemaps (Web)
yarn gulp compile-build
yarn gulp compile-extensions-build
yarn gulp minify-vscode
yarn gulp vscode-linux-x64-min-ci
yarn gulp vscode-web-min-ci
yarn gulp vscode-reh-linux-x64-min
yarn gulp vscode-reh-web-linux-x64-yarnrc-extensions
yarn gulp vscode-reh-web-linux-x64-min
displayName: Compile
# - script: |
# set -e
# AZURE_DOCUMENTDB_MASTERKEY="$(builds-docdb-key-readwrite)" \
# AZURE_STORAGE_ACCESS_KEY_2="$(vscode-storage-key)" \
# VSCODE_MIXIN_PASSWORD="$(github-distro-mixin-password)" \
# ./build/azure-pipelines/web/publish.sh
# displayName: Publish
# AZURE_STORAGE_ACCOUNT="$(sourcemap-storage-account)" \
# AZURE_STORAGE_ACCESS_KEY="$(sourcemap-storage-key)" \
# node build/azure-pipelines/upload-sourcemaps
# displayName: Upload sourcemaps
- script: |
set -e
VERSION=$(node -p "require(\"./package.json\").version")
echo -e "{ \"version\": \"$VERSION\", \"quality\": \"$VSCODE_QUALITY\", \"commit\": \"$BUILD_SOURCEVERSION\" }" > ".build/version.json"
node build/azure-pipelines/common/copyArtifacts.js
displayName: Write Version Information
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- script: |
set -e
tar -czf $(Build.ArtifactStagingDirectory)/compilation.tar.gz .build out-*
displayName: Compress compilation artifact
- task: PublishPipelineArtifact@1
inputs:
targetPath: $(Build.ArtifactStagingDirectory)/compilation.tar.gz
artifactName: Compilation
displayName: Publish compilation artifact
- script: |
set -e
yarn gulp package-rebuild-extensions
yarn gulp compile-extensions
yarn gulp package-external-extensions
displayName: Package External extensions
- script: |
set -e
yarn gulp package-langpacks
displayName: Package Langpacks
- script: |
set -e
yarn gulp vscode-linux-x64-build-deb
displayName: Build Deb
- script: |
set -e
yarn gulp vscode-linux-x64-build-rpm
displayName: Build Rpm
- task: UseDotNet@2
displayName: 'Install .NET Core sdk for signing'
inputs:
packageType: sdk
version: 2.1.x
installationPath: $(Agent.ToolsDirectory)/dotnet
- task: SFP.build-tasks.custom-build-task-1.EsrpCodeSigning@1
inputs:
ConnectedServiceName: 'Code Signing'
FolderPath: '$(Build.SourcesDirectory)/.build'
Pattern: 'extensions/*.vsix,langpacks/*.vsix'
signConfigType: inlineSignParams
inlineOperation: |
[
{
"keyCode": "CP-233016",
"operationSetCode": "OpcSign",
"parameters": [
{
"parameterName": "FileDigest",
"parameterValue": "/fd \"SHA256\""
}
],
"toolName": "sign",
"toolVersion": "1.0"
},
{
"keyCode": "CP-233016",
"operationSetCode": "OpcVerify",
"parameters": [],
"toolName": "sign",
"toolVersion": "1.0"
}
]
SessionTimeout: 120
displayName: 'Signing Extensions and Langpacks'
condition: and(succeeded(), eq(variables['signed'], true))
- script: |
set -e
./build/azure-pipelines/web/createDrop.sh
displayName: Create Drop
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
continueOnError: true
condition: succeededOrFailed()
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true


@@ -1,7 +1,7 @@
steps:
- task: NodeTool@0
inputs:
versionSpec: "12.18.3"
versionSpec: "14.x"
- task: geeklearningio.gl-vsts-tasks-yarn.yarn-installer-task.YarnInstaller@2
inputs:
@@ -65,8 +65,10 @@ steps:
condition: and(succeeded(), eq(variables.NODE_MODULES_RESTORED, 'true'))
displayName: Extract node_modules cache
- script: |
npx https://aka.ms/enablesecurefeed standAlone
- powershell: |
. build/azure-pipelines/win32/exec.ps1
$ErrorActionPreference = "Stop"
exec { npx https://aka.ms/enablesecurefeed standAlone }
timeoutInMinutes: 5
condition: and(succeeded(), ne(variables.NODE_MODULES_RESTORED, 'true'), eq(variables['ENABLE_TERRAPIN'], 'true'))
displayName: Switch to Terrapin packages
@@ -76,6 +78,7 @@ steps:
. build/azure-pipelines/win32/retry.ps1
$ErrorActionPreference = "Stop"
$env:npm_config_arch="$(VSCODE_ARCH)"
$env:npm_config_build_from_source="true"
$env:CHILD_CONCURRENCY="1"
retry { exec { yarn --frozen-lockfile } }
env:
@@ -177,7 +180,7 @@ steps:
$ErrorActionPreference = "Stop"
exec { $env:VSCODE_REMOTE_SERVER_PATH = "$(agent.builddirectory)\vscode-reh-web-win32-$(VSCODE_ARCH)"; .\resources\server\test\test-web-integration.bat --browser firefox }
displayName: Run integration tests (Browser)
timeoutInMinutes: 7
timeoutInMinutes: 10
condition: and(succeeded(), eq(variables['VSCODE_STEP_ON_IT'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- powershell: |
@@ -320,7 +323,3 @@ steps:
artifact: vscode-server-win32-$(VSCODE_ARCH)-web
displayName: Publish web server archive
condition: and(succeeded(), ne(variables['VSCODE_PUBLISH'], 'false'), ne(variables['VSCODE_ARCH'], 'arm64'))
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: "Component Detection"
continueOnError: true


@@ -23,7 +23,17 @@ ipcMain.handle('pickdir', async () => {
});
app.once('ready', () => {
window = new BrowserWindow({ width: 800, height: 600, webPreferences: { nodeIntegration: true, webviewTag: true, enableWebSQL: false, nativeWindowOpen: true } });
window = new BrowserWindow({
width: 800,
height: 600,
webPreferences: {
nodeIntegration: true,
contextIsolation: false,
webviewTag: true,
enableWebSQL: false,
nativeWindowOpen: true
}
});
window.setMenuBarVisibility(false);
window.loadURL(url.format({ pathname: path.join(__dirname, 'index.html'), protocol: 'file:', slashes: true }));
// window.webContents.openDevTools();
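The expanded `BrowserWindow` options spell out `contextIsolation: false` because Electron 12 (the exploration target mentioned earlier in this compare) changed that flag's default to `true`; a renderer that relies on `nodeIntegration`, as this test harness does, now has to opt out explicitly. A minimal, self-contained sketch of the same construction with illustrative values:

```ts
// main.ts — minimal Electron 12 sketch; sizes and file name are illustrative only.
import { app, BrowserWindow } from 'electron';

app.once('ready', () => {
	const window = new BrowserWindow({
		width: 800,
		height: 600,
		webPreferences: {
			nodeIntegration: true,
			contextIsolation: false, // default flipped to true in Electron 12
			webviewTag: true,
			enableWebSQL: false,
			nativeWindowOpen: true
		}
	});
	window.setMenuBarVisibility(false);
	window.loadFile('index.html');
});
```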


@@ -16,14 +16,14 @@ async function main() {
throw new Error('$AGENT_BUILDDIRECTORY not set');
}
const appName = product.nameLong + '.app';
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
const x64AppPath = path.join(buildDir, 'VSCode-darwin-x64', appName);
const arm64AppPath = path.join(buildDir, 'VSCode-darwin-arm64', appName);
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
const productJsonPath = path.resolve(outAppPath, 'Contents', 'Resources', 'app', 'product.json');
const infoPlistPath = path.resolve(outAppPath, 'Contents', 'Info.plist');
await vscode_universal_1.makeUniversalApp({
await (0, vscode_universal_1.makeUniversalApp)({
x64AppPath,
arm64AppPath,
x64AsarPath,
@@ -33,6 +33,7 @@ async function main() {
'Credits.rtf',
'CodeResources',
'fsevents.node',
'Info.plist',
'.npmrc'
],
outAppPath,


@@ -20,8 +20,8 @@ async function main() {
}
const appName = product.nameLong + '.app';
const x64AppPath = path.join(buildDir, 'vscode-x64', appName);
const arm64AppPath = path.join(buildDir, 'vscode-arm64', appName);
const x64AppPath = path.join(buildDir, 'VSCode-darwin-x64', appName);
const arm64AppPath = path.join(buildDir, 'VSCode-darwin-arm64', appName);
const x64AsarPath = path.join(x64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const arm64AsarPath = path.join(arm64AppPath, 'Contents', 'Resources', 'app', 'node_modules.asar');
const outAppPath = path.join(buildDir, `VSCode-darwin-${arch}`, appName);
@@ -38,6 +38,7 @@ async function main() {
'Credits.rtf',
'CodeResources',
'fsevents.node',
'Info.plist', // TODO@deepak1556: regressed with 11.4.2 internal builds
'.npmrc'
],
outAppPath,


@@ -5,7 +5,9 @@
'use strict';
Object.defineProperty(exports, "__esModule", { value: true });
const codesign = require("electron-osx-sign");
const fs = require("fs-extra");
const path = require("path");
const plist = require("plist");
const util = require("../lib/util");
const product = require("../../product.json");
async function main() {
@@ -25,6 +27,7 @@ async function main() {
const helperAppBaseName = product.nameShort;
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
const infoPlistPath = path.resolve(appRoot, appName, 'Contents', 'Info.plist');
const defaultOpts = {
app: path.join(appRoot, appName),
platform: 'darwin',
@@ -46,6 +49,14 @@ async function main() {
} });
const gpuHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, gpuHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-gpu-entitlements.plist') });
const rendererHelperOpts = Object.assign(Object.assign({}, defaultOpts), { app: path.join(appFrameworkPath, rendererHelperAppName), entitlements: path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'), 'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist') });
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
let infoPlistJson = plist.parse(infoPlistString);
Object.assign(infoPlistJson, {
NSAppleEventsUsageDescription: 'An application in Visual Studio Code wants to use AppleScript.',
NSMicrophoneUsageDescription: 'An application in Visual Studio Code wants to use the Microphone.',
NSCameraUsageDescription: 'An application in Visual Studio Code wants to use the Camera.'
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
await codesign.signAsync(gpuHelperOpts);
await codesign.signAsync(rendererHelperOpts);
await codesign.signAsync(appOpts);


@@ -6,7 +6,9 @@
'use strict';
import * as codesign from 'electron-osx-sign';
import * as fs from 'fs-extra';
import * as path from 'path';
import * as plist from 'plist';
import * as util from '../lib/util';
import * as product from '../../product.json';
@@ -30,6 +32,7 @@ async function main(): Promise<void> {
const helperAppBaseName = product.nameShort;
const gpuHelperAppName = helperAppBaseName + ' Helper (GPU).app';
const rendererHelperAppName = helperAppBaseName + ' Helper (Renderer).app';
const infoPlistPath = path.resolve(appRoot, appName, 'Contents', 'Info.plist');
const defaultOpts: codesign.SignOptions = {
app: path.join(appRoot, appName),
@@ -68,6 +71,15 @@ async function main(): Promise<void> {
'entitlements-inherit': path.join(baseDir, 'azure-pipelines', 'darwin', 'helper-renderer-entitlements.plist'),
};
let infoPlistString = await fs.readFile(infoPlistPath, 'utf8');
let infoPlistJson = plist.parse(infoPlistString);
Object.assign(infoPlistJson, {
NSAppleEventsUsageDescription: 'An application in Visual Studio Code wants to use AppleScript.',
NSMicrophoneUsageDescription: 'An application in Visual Studio Code wants to use the Microphone.',
NSCameraUsageDescription: 'An application in Visual Studio Code wants to use the Camera.'
});
await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
await codesign.signAsync(gpuHelperOpts);
await codesign.signAsync(rendererHelperOpts);
await codesign.signAsync(appOpts as any);
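
The hunks above read Info.plist, merge in the three usage-description keys, and write it back before signing. A condensed sketch of that step, using the same 'plist' and 'fs-extra' modules the script imports:

import * as fs from 'fs-extra';
import * as plist from 'plist';

async function addUsageDescriptions(infoPlistPath: string): Promise<void> {
	// Parse the existing plist, merge the new keys, and serialize it back.
	const infoPlistJson: any = plist.parse(await fs.readFile(infoPlistPath, 'utf8'));
	Object.assign(infoPlistJson, {
		NSAppleEventsUsageDescription: 'An application in Visual Studio Code wants to use AppleScript.',
		NSMicrophoneUsageDescription: 'An application in Visual Studio Code wants to use the Microphone.',
		NSCameraUsageDescription: 'An application in Visual Studio Code wants to use the Camera.'
	});
	await fs.writeFile(infoPlistPath, plist.build(infoPlistJson), 'utf8');
}

Signing (codesign.signAsync) then runs against the patched bundle, as shown above.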

View File

@@ -51,8 +51,10 @@ module.exports.indentationFilter = [
'!test/monaco/out/**',
'!test/smoke/out/**',
'!extensions/typescript-language-features/test-workspace/**',
'!extensions/notebook-markdown-extensions/notebook-out/**',
'!extensions/vscode-api-tests/testWorkspace/**',
'!extensions/vscode-api-tests/testWorkspace2/**',
'!extensions/vscode-custom-editor-tests/test-workspace/**',
'!build/monaco/**',
'!build/win32/**',
@@ -86,6 +88,8 @@ module.exports.indentationFilter = [
'!**/*.Dockerfile',
'!**/*.dockerfile',
'!extensions/markdown-language-features/media/*.js',
'!extensions/markdown-language-features/notebook-out/*.js',
'!extensions/notebook-markdown-extensions/notebook-out/*.js',
'!extensions/simple-browser/media/*.js',
];
@@ -115,6 +119,7 @@ module.exports.copyrightFilter = [
'!resources/completions/**',
'!extensions/configuration-editing/build/inline-allOf.ts',
'!extensions/markdown-language-features/media/highlight.css',
'!extensions/notebook-markdown-extensions/notebook-out/**',
'!extensions/html-language-features/server/src/modes/typescript/*',
'!extensions/*/server/bin/*',
'!src/vs/editor/test/node/classification/typescript-test.ts',

View File

@@ -8,6 +8,7 @@ require('events').EventEmitter.defaultMaxListeners = 100;
const gulp = require('gulp');
const path = require('path');
const child_process = require('child_process');
const nodeUtil = require('util');
const es = require('event-stream');
const filter = require('gulp-filter');
@@ -24,26 +25,55 @@ const ansiColors = require('ansi-colors');
const ext = require('./lib/extensions');
const extensionsPath = path.join(path.dirname(__dirname), 'extensions');
// {{SQL CARBON EDIT}}
const sqlLocalizedExtensions = [
'admin-tool-ext-win',
'agent',
'cms',
'dacpac',
'import',
'machine-learning',
'profiler',
'schema-compare',
'server-report',
'sql-assessment',
'sql-database-projects'
];
// {{SQL CARBON EDIT}}
// {{SQL CARBON EDIT}} - TODO: Import needs to be updated to work with langpacks.
const sqlLocalizedExtensions = [
'import',
];
// {{SQL CARBON EDIT}} Not doing this for us right now
// To save 250ms for each gulp startup, we are caching the result here
const compilations = glob.sync('**/tsconfig.json', {
cwd: extensionsPath,
ignore: ['**/out/**', '**/node_modules/**']
});
// const compilations = [
// 'configuration-editing/build/tsconfig.json',
// 'configuration-editing/tsconfig.json',
// 'css-language-features/client/tsconfig.json',
// 'css-language-features/server/tsconfig.json',
// 'debug-auto-launch/tsconfig.json',
// 'debug-server-ready/tsconfig.json',
// 'emmet/tsconfig.json',
// 'extension-editing/tsconfig.json',
// 'git/tsconfig.json',
// 'github-authentication/tsconfig.json',
// 'github/tsconfig.json',
// 'grunt/tsconfig.json',
// 'gulp/tsconfig.json',
// 'html-language-features/client/tsconfig.json',
// 'html-language-features/server/tsconfig.json',
// 'image-preview/tsconfig.json',
// 'jake/tsconfig.json',
// 'json-language-features/client/tsconfig.json',
// 'json-language-features/server/tsconfig.json',
// 'markdown-language-features/preview-src/tsconfig.json',
// 'markdown-language-features/tsconfig.json',
// 'merge-conflict/tsconfig.json',
// 'microsoft-authentication/tsconfig.json',
// 'npm/tsconfig.json',
// 'php-language-features/tsconfig.json',
// 'search-result/tsconfig.json',
// 'simple-browser/tsconfig.json',
// 'testing-editor-contributions/tsconfig.json',
// 'typescript-language-features/test-workspace/tsconfig.json',
// 'typescript-language-features/tsconfig.json',
// 'vscode-api-tests/tsconfig.json',
// 'vscode-colorize-tests/tsconfig.json',
// 'vscode-custom-editor-tests/tsconfig.json',
// 'vscode-notebook-tests/tsconfig.json',
// 'vscode-test-resolver/tsconfig.json'
// ];
const getBaseUrl = out => `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}/${out}`;
@@ -175,7 +205,50 @@ exports.watchExtensionsTask = watchExtensionsTask;
const compileExtensionsBuildLegacyTask = task.define('compile-extensions-build-legacy', task.parallel(...tasks.map(t => t.compileBuildTask)));
gulp.task(compileExtensionsBuildLegacyTask);
// Azure Pipelines
//#region Extension media
// Additional projects to webpack. These typically build code for webviews
const webpackMediaConfigFiles = [
'markdown-language-features/webpack.config.js',
'simple-browser/webpack.config.js',
];
// Additional projects to run esbuild on. These typically build code for webviews
const esbuildMediaScripts = [
'markdown-language-features/esbuild.js',
'notebook-markdown-extensions/esbuild.js',
];
const compileExtensionMediaTask = task.define('compile-extension-media', () => buildExtensionMedia(false));
gulp.task(compileExtensionMediaTask);
exports.compileExtensionMediaTask = compileExtensionMediaTask;
const watchExtensionMedia = task.define('watch-extension-media', () => buildExtensionMedia(true));
gulp.task(watchExtensionMedia);
exports.watchExtensionMedia = watchExtensionMedia;
const compileExtensionMediaBuildTask = task.define('compile-extension-media-build', () => buildExtensionMedia(false, '.build/extensions'));
gulp.task(compileExtensionMediaBuildTask);
async function buildExtensionMedia(isWatch, outputRoot) {
const webpackConfigLocations = webpackMediaConfigFiles.map(p => {
return {
configPath: path.join(extensionsPath, p),
outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
};
});
return Promise.all([
webpackExtensions('webpacking extension media', isWatch, webpackConfigLocations),
esbuildExtensions('esbuilding extension media', isWatch, esbuildMediaScripts.map(p => ({
script: path.join(extensionsPath, p),
outputRoot: outputRoot ? path.join(root, outputRoot, path.dirname(p)) : undefined
}))),
]);
}
//#endregion
//#region Azure Pipelines
const cleanExtensionsBuildTask = task.define('clean-extensions-build', util.rimraf('.build/extensions'));
const compileExtensionsBuildTask = task.define('compile-extensions-build', task.series(
@@ -185,10 +258,55 @@ const compileExtensionsBuildTask = task.define('compile-extensions-build', task.
));
gulp.task(compileExtensionsBuildTask);
gulp.task(task.define('extensions-ci', task.series(compileExtensionsBuildTask)));
gulp.task(task.define('extensions-ci', task.series(compileExtensionsBuildTask, compileExtensionMediaBuildTask)));
exports.compileExtensionsBuildTask = compileExtensionsBuildTask;
//#endregion
// {{SQL CARBON EDIT}}
//#region XLF Creation
//Get every extension in 'extensions' to create XLF files.
const exportCompilations = glob.sync('**/package.json', {
cwd: extensionsPath,
ignore: ['**/out/**', '**/node_modules/**', 'package.json']
});
//Run the localization packaging task on all extensions in ADS.
const exportTasks = exportCompilations.map(function (packageFile) {
const locFunc = require('./lib/locFunc');
const relativeDirname = path.dirname(packageFile);
const extensionName = relativeDirname.replace(/\//g, '-');
const packageTask = task.define(`localization-package-extension:${extensionName}`, task.series(() => {
return locFunc.packageSingleExtensionStream(extensionName)
.pipe(gulp.dest('.build'));
}));
// Tasks
gulp.task(packageTask);
return { packageTask };
});
const packageLocalizationExtensionsTask = task.define('package-localization-extensions-task', task.series(...exportTasks.map(t => t.packageTask)));
gulp.task(packageLocalizationExtensionsTask);
//Builds all ADS extensions including external/excluded extensions (only for creating XLF files, not for compiling extensions for shipping)
const compileLocalizationExtensionsBuildTask = task.define('compile-localization-extensions-build', task.series(
cleanExtensionsBuildTask,
compileExtensionsTask,
task.define('bundle-marketplace-extensions-build', () => ext.packageMarketplaceExtensionsStream(false).pipe(gulp.dest('.build'))),
packageLocalizationExtensionsTask,
));
gulp.task(compileLocalizationExtensionsBuildTask);
exports.compileLocalizationExtensionsBuildTask = compileLocalizationExtensionsBuildTask;
//#endregion
// {{SQL CARBON EDIT}} end
const compileWebExtensionsTask = task.define('compile-web', () => buildWebExtensions(false));
gulp.task(compileWebExtensionsTask);
exports.compileWebExtensionsTask = compileWebExtensionsTask;
@@ -198,23 +316,39 @@ gulp.task(watchWebExtensionsTask);
exports.watchWebExtensionsTask = watchWebExtensionsTask;
async function buildWebExtensions(isWatch) {
const webpack = require('webpack');
const webpackConfigLocations = await nodeUtil.promisify(glob)(
path.join(extensionsPath, '**', 'extension-browser.webpack.config.js'),
{ ignore: ['**/node_modules'] }
);
return webpackExtensions('packaging web extension', isWatch, webpackConfigLocations.map(configPath => ({ configPath })));
}
/**
* @param {string} taskName
* @param {boolean} isWatch
* @param {{ configPath: string, outputRoot?: string }[]} webpackConfigLocations
*/
async function webpackExtensions(taskName, isWatch, webpackConfigLocations) {
const webpack = require('webpack');
const webpackConfigs = [];
for (const webpackConfigPath of webpackConfigLocations) {
const configOrFnOrArray = require(webpackConfigPath);
for (const { configPath, outputRoot } of webpackConfigLocations) {
const configOrFnOrArray = require(configPath);
function addConfig(configOrFn) {
let config;
if (typeof configOrFn === 'function') {
webpackConfigs.push(configOrFn({}, {}));
config = configOrFn({}, {});
webpackConfigs.push(config);
} else {
webpackConfigs.push(configOrFn);
config = configOrFn;
}
if (outputRoot) {
config.output.path = path.join(outputRoot, path.relative(path.dirname(configPath), config.output.path));
}
webpackConfigs.push(configOrFn);
}
addConfig(configOrFnOrArray);
}
@@ -225,7 +359,7 @@ async function buildWebExtensions(isWatch) {
if (outputPath) {
const relativePath = path.relative(extensionsPath, outputPath).replace(/\\/g, '/');
const match = relativePath.match(/[^\/]+(\/server|\/client)?/);
fancyLog(`Finished ${ansiColors.green('packaging web extension')} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
fancyLog(`Finished ${ansiColors.green(taskName)} ${ansiColors.cyan(match[0])} with ${stats.errors.length} errors.`);
}
if (Array.isArray(stats.errors)) {
stats.errors.forEach(error => {
@@ -263,4 +397,44 @@ async function buildWebExtensions(isWatch) {
});
}
/**
* @param {string} taskName
* @param {boolean} isWatch
* @param {{ script: string, outputRoot?: string }[]} scripts
*/
async function esbuildExtensions(taskName, isWatch, scripts) {
function reporter(/** @type {string} */ stdError, /** @type {string} */script) {
const matches = (stdError || '').match(/\> (.+): error: (.+)?/g);
fancyLog(`Finished ${ansiColors.green(taskName)} ${script} with ${matches ? matches.length : 0} errors.`);
for (const match of matches || []) {
fancyLog.error(match);
}
}
const tasks = scripts.map(({ script, outputRoot }) => {
return new Promise((resolve, reject) => {
const args = [script];
if (isWatch) {
args.push('--watch');
}
if (outputRoot) {
args.push('--outputRoot', outputRoot);
}
const proc = child_process.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
if (error) {
return reject(error);
}
reporter(stderr, script);
if (stderr) {
return reject();
}
return resolve();
});
proc.stdout.on('data', (data) => {
fancyLog(`${ansiColors.green(taskName)}: ${data.toString('utf8')}`);
});
});
});
return Promise.all(tasks);
}
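
A condensed sketch of the esbuild driver added above: each media script is executed as a child Node process, '--watch' and '--outputRoot' are forwarded, and stderr is scanned for esbuild-style "> file: error: ..." lines. All names mirror the diff; nothing new is assumed.

import * as child_process from 'child_process';

function runEsbuildScript(script: string, isWatch: boolean, outputRoot?: string): Promise<void> {
	const args = [script];
	if (isWatch) {
		args.push('--watch');
	}
	if (outputRoot) {
		args.push('--outputRoot', outputRoot);
	}
	return new Promise((resolve, reject) => {
		// Run the extension's esbuild.js with the same Node binary that runs gulp.
		child_process.execFile(process.argv[0], args, {}, (error, _stdout, stderr) => {
			if (error) {
				return reject(error);
			}
			const errors = (stderr || '').match(/> (.+): error: (.+)?/g) || [];
			errors.forEach(e => console.error(e));
			return stderr ? reject(new Error(stderr)) : resolve();
		});
	});
}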

View File

@@ -13,7 +13,7 @@ const util = require('./lib/util');
const task = require('./lib/task');
const compilation = require('./lib/compilation');
const { monacoTypecheckTask/* , monacoTypecheckWatchTask */ } = require('./gulpfile.editor');
const { compileExtensionsTask, watchExtensionsTask } = require('./gulpfile.extensions');
const { compileExtensionsTask, watchExtensionsTask, compileExtensionMediaTask } = require('./gulpfile.extensions');
// Fast compile for development time
const compileClientTask = task.define('compile-client', task.series(util.rimraf('out'), compilation.compileTask('src', 'out', false)));
@@ -23,7 +23,7 @@ const watchClientTask = task.define('watch-client', task.series(util.rimraf('out
gulp.task(watchClientTask);
// All
const compileTask = task.define('compile', task.parallel(monacoTypecheckTask, compileClientTask, compileExtensionsTask));
const compileTask = task.define('compile', task.parallel(monacoTypecheckTask, compileClientTask, compileExtensionsTask, compileExtensionMediaTask));
gulp.task(compileTask);
gulp.task(task.define('watch', task.parallel(/* monacoTypecheckWatchTask, */ watchClientTask, watchExtensionsTask)));

View File

@@ -28,6 +28,7 @@ const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const { vscodeWebEntryPoints, vscodeWebResourceIncludes, createVSCodeWebFileContentMapper } = require('./gulpfile.vscode.web');
const cp = require('child_process');
const { rollupAngular } = require('./lib/rollup');
const REPO_ROOT = path.dirname(__dirname);
const commit = util.getVersion(REPO_ROOT);
@@ -114,6 +115,10 @@ const serverEntryPoints = [
{
name: 'vs/platform/files/node/watcher/nsfw/watcherApp',
exclude: ['vs/css', 'vs/nls']
},
{
name: 'vs/platform/terminal/node/ptyHostMain',
exclude: ['vs/css', 'vs/nls']
}
];
@@ -437,8 +442,46 @@ function packagePkgTask(platform, arch, pkgTarget) {
const sourceFolderName = `out-vscode-${type}${dashed(minified)}`;
const destinationFolderName = `vscode-${type}${dashed(platform)}${dashed(arch)}`;
const rollupAngularTask = task.define(`vscode-web-${type}${dashed(platform)}${dashed(arch)}-angular-rollup`, () => {
return rollupAngular(REMOTE_FOLDER);
});
gulp.task(rollupAngularTask);
// rebuild extensions that contain native npm modules or have conditional webpack rules
// when building with the web .yarnrc settings (e.g. runtime=node, etc.)
// this is needed to have correct module set published with desired ABI
const rebuildExtensions = ['big-data-cluster', 'mssql', 'notebook'];
const EXTENSIONS = path.join(REPO_ROOT, 'extensions');
function exec(cmdLine, cwd) {
console.log(cmdLine);
cp.execSync(cmdLine, { stdio: 'inherit', cwd: cwd });
}
const tasks = [];
rebuildExtensions.forEach(scope => {
const root = path.join(EXTENSIONS, scope);
tasks.push(
() => gulp.src(path.join(REMOTE_FOLDER, '.yarnrc')).pipe(gulp.dest(root)),
util.rimraf(path.join(root, 'node_modules')),
() => exec('yarn', root)
);
});
const yarnrcExtensions = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}-yarnrc-extensions`, task.series(...tasks));
gulp.task(yarnrcExtensions);
const cleanupExtensions = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}-cleanup-extensions`, () => {
return Promise.all(rebuildExtensions.map(scope => {
const root = path.join(EXTENSIONS, scope);
return util.rimraf(path.join(root, '.yarnrc'))();
}));
});
gulp.task(cleanupExtensions);
const serverTaskCI = task.define(`vscode-${type}${dashed(platform)}${dashed(arch)}${dashed(minified)}-ci`, task.series(
gulp.task(`node-${platform}-${platform === 'darwin' ? 'x64' : arch}`),
yarnrcExtensions,
compileExtensionsBuildTask,
cleanupExtensions,
rollupAngularTask,
util.rimraf(path.join(BUILD_ROOT, destinationFolderName)),
packageTask(type, platform, arch, sourceFolderName, destinationFolderName)
));
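
The block above rebuilds extensions that carry native modules against the server runtime: the remote .yarnrc is copied into each extension, node_modules is wiped, yarn reinstalls with the remote ABI settings, and the temporary .yarnrc is removed again after compileExtensionsBuildTask. A sketch of the same sequence, using fs-extra in place of the gulp/rimraf plumbing from the diff:

import * as cp from 'child_process';
import * as path from 'path';
import * as fs from 'fs-extra';

async function rebuildWithRemoteYarnrc(extensionRoot: string, remoteYarnrcPath: string): Promise<void> {
	// Pick up the remote runtime/ABI settings for this install only.
	await fs.copy(remoteYarnrcPath, path.join(extensionRoot, '.yarnrc'));
	// Force a clean reinstall so native modules are rebuilt against that runtime.
	await fs.remove(path.join(extensionRoot, 'node_modules'));
	cp.execSync('yarn', { stdio: 'inherit', cwd: extensionRoot });
}

async function removeTemporaryYarnrc(extensionRoot: string): Promise<void> {
	// Restore the default install behaviour once the extension has been packaged.
	await fs.remove(path.join(extensionRoot, '.yarnrc'));
}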

View File

@@ -145,3 +145,9 @@ gulp.task('package-rebuild-extensions', task.series(
task.define('clean-rebuild-extensions', () => ext.cleanRebuildExtensions('.build/extensions')),
task.define('rebuild-extensions-build', () => ext.packageRebuildExtensionsStream().pipe(gulp.dest('.build'))),
));
gulp.task('update-langpacks', task.series(
task.define('rename-vscode-packs', () => loc.renameVscodeLangpacks()),
task.define('refresh-langpack-resources', () => loc.refreshLangpacks())
));

View File

@@ -30,7 +30,7 @@ const { getProductionDependencies } = require('./lib/dependencies');
const { config } = require('./lib/electron');
const createAsar = require('./lib/asar').createAsar;
const { compileBuildTask } = require('./gulpfile.compile');
const { compileExtensionsBuildTask } = require('./gulpfile.extensions');
const { compileExtensionsBuildTask, compileLocalizationExtensionsBuildTask } = require('./gulpfile.extensions'); // {{SQL CARBON EDIT}} Must handle localization code.
// Build
const vscodeEntryPoints = _.flatten([
@@ -51,7 +51,6 @@ const vscodeResources = [
'out-build/bootstrap-amd.js',
'out-build/bootstrap-node.js',
'out-build/bootstrap-window.js',
'out-build/paths.js',
'out-build/vs/**/*.{svg,png,html,jpg}',
'!out-build/vs/code/browser/**/*.html',
'!out-build/vs/editor/standalone/**/*.svg',
@@ -60,6 +59,7 @@ const vscodeResources = [
'out-build/vs/base/node/{stdForkStart.js,terminateProcess.sh,cpuUsage.sh,ps.sh}',
'out-build/vs/base/browser/ui/codicons/codicon/**',
'out-build/vs/base/parts/sandbox/electron-browser/preload.js',
'out-build/vs/platform/environment/node/userDataPath.js',
'out-build/vs/workbench/browser/media/*-theme.css',
'out-build/vs/workbench/contrib/debug/**/*.json',
'out-build/vs/workbench/contrib/externalTerminal/**/*.scpt',
@@ -108,6 +108,50 @@ const optimizeVSCodeTask = task.define('optimize-vscode', task.series(
));
gulp.task(optimizeVSCodeTask);
// {{SQL CARBON EDIT}} Gulp task that imports any relevant ADS XLF found in vscode-translations-export to resources/xlf/en folder.
// List of ADS extension XLF files that we want to put into the English resource folder.
const extensionsFilter = filter([
"**/admin-tool-ext-win.xlf",
"**/agent.xlf",
"**/arc.xlf",
"**/asde-deployment.xlf",
"**/azdata.xlf",
"**/azurecore.xlf",
"**/azurehybridtoolkit.xlf",
"**/big-data-cluster.xlf",
"**/cms.xlf",
"**/dacpac.xlf",
"**/data-workspace.xlf",
"**/import.xlf",
"**/kusto.xlf",
"**/machine-learning.xlf",
"**/Microsoft.sqlservernotebook.xlf",
"**/mssql.xlf",
"**/notebook.xlf",
"**/profiler.xlf",
"**/query-history.xlf",
"**/resource-deployment.xlf",
"**/schema-compare.xlf",
"**/server-report.xlf",
"**/sql-assessment.xlf",
"**/sql-database-projects.xlf",
"**/sql-migration.xlf",
"**/xml-language-features.xlf"
])
// Copy ADS extension XLFs into English resource folder.
const importExtensionsTask = task.define('import-extensions-xlfs', function () {
return es.merge(
gulp.src(`./vscode-translations-export/vscode-extensions/*.xlf`)
.pipe(extensionsFilter),
gulp.src(`./vscode-translations-export/ads-core/*.xlf`)
)
.pipe(vfs.dest(`./resources/xlf/en`));
});
gulp.task(importExtensionsTask)
// {{SQL CARBON EDIT}} end
const sourceMappingURLBase = `https://sqlopsbuilds.blob.core.windows.net/sourcemaps/${commit}`;
const minifyVSCodeTask = task.define('minify-vscode', task.series(
optimizeVSCodeTask,
@@ -233,10 +277,12 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
const productionDependencies = getProductionDependencies(root);
const dependenciesSrc = _.flatten(productionDependencies.map(d => path.relative(root, d.path)).map(d => [`${d}/**`, `!${d}/**/{test,tests}/**`]));
// {{SQL CARBON EDIT}} - fix runtime module load break
const deps = gulp.src(dependenciesSrc, { base: '.', dot: true })
.pipe(filter(['**', '!**/package-lock.json']))
.pipe(filter(['**', `!**/${config.version}/**`, '!**/bin/darwin-arm64-87/**', '!**/package-lock.json', '!**/yarn.lock', '!**/*.js.map']))
.pipe(util.cleanNodeModules(path.join(__dirname, '.moduleignore')))
.pipe(jsFilter)
.pipe(util.rewriteSourceMappingURL(sourceMappingURLBase))
.pipe(jsFilter.restore)
.pipe(createAsar(path.join(process.cwd(), 'node_modules'), ['**/*.node', '**/vscode-ripgrep/bin/*', '**/node-pty/build/Release/*', '**/*.wasm'], 'node_modules.asar'));
let all = es.merge(
@@ -271,6 +317,7 @@ function packageTask(platform, arch, sourceFolderName, destinationFolderName, op
.pipe(fileLengthFilter.restore)
.pipe(util.skipDirectories())
.pipe(util.fixWin32DirectoryPermissions())
.pipe(filter(['**', '!**/.github/**'], { dot: true })) // https://github.com/microsoft/vscode/issues/116523
.pipe(electron(_.extend({}, config, { platform, arch: arch === 'armhf' ? 'arm' : arch, ffmpegChromium: true })))
.pipe(filter(['**', '!LICENSE', '!LICENSES.chromium.html', '!version'], { dot: true }));
@@ -434,11 +481,12 @@ gulp.task(task.define(
)
));
gulp.task(task.define(
// {{SQL CARBON EDIT}} Allow for gulp task to be added to update-english-xlfs.
const vscodeTranslationsExport = task.define(
'vscode-translations-export',
task.series(
compileBuildTask,
compileExtensionsBuildTask,
compileLocalizationExtensionsBuildTask, // {{SQL CARBON EDIT}} now include all extensions in ADS, not just a subset. (replaces "compileExtensionsBuildTask" here).
optimizeVSCodeTask,
function () {
const pathToMetadata = './out-vscode/nls.metadata.json';
@@ -449,10 +497,22 @@ gulp.task(task.define(
gulp.src(pathToMetadata).pipe(i18n.createXlfFilesForCoreBundle()),
gulp.src(pathToSetup).pipe(i18n.createXlfFilesForIsl()),
gulp.src(pathToExtensions).pipe(i18n.createXlfFilesForExtensions())
).pipe(vfs.dest('../vscode-translations-export'));
).pipe(vfs.dest('./vscode-translations-export')); // {{SQL CARBON EDIT}} move vscode-translations-export into ADS (for safely deleting after use).
}
)
);
gulp.task(vscodeTranslationsExport)
// {{SQL CARBON EDIT}} Localization gulp task, runs vscodeTranslationsExport and imports a subset of the generated XLFs into the folder.
gulp.task(task.define(
'update-english-xlfs',
task.series(
vscodeTranslationsExport,
importExtensionsTask,
task.define('delete-vscode-translations-export', util.rimraf('./vscode-translations-export'))
)
));
// {{SQL CARBON EDIT}} end
gulp.task('vscode-translations-pull', function () {
return es.merge([...i18n.defaultLanguages, ...i18n.extraLanguages].map(language => {
@@ -535,7 +595,7 @@ gulp.task(task.define(
if (!shouldSetupSettingsSearch()) {
const branch = process.env.BUILD_SOURCEBRANCH;
console.log(`Only runs on master and release branches, not ${branch}`);
console.log(`Only runs on main and release branches, not ${branch}`);
return;
}
@@ -561,21 +621,21 @@ gulp.task(task.define(
function shouldSetupSettingsSearch() {
const branch = process.env.BUILD_SOURCEBRANCH;
return branch && (/\/master$/.test(branch) || branch.indexOf('/release/') >= 0);
return branch && (/\/main$/.test(branch) || branch.indexOf('/release/') >= 0);
}
function getSettingsSearchBuildId(packageJson) {
try {
const branch = process.env.BUILD_SOURCEBRANCH;
const branchId = branch.indexOf('/release/') >= 0 ? 0 :
/\/master$/.test(branch) ? 1 :
/\/main$/.test(branch) ? 1 :
2; // Some unexpected branch
const out = cp.execSync(`git rev-list HEAD --count`);
const count = parseInt(out.toString());
// <version number><commit count><branchId (avoid unlikely conflicts)>
// 1.25.1, 1,234,567 commits, master = 1250112345671
// 1.25.1, 1,234,567 commits, main = 1250112345671
return util.versionStringToNumber(packageJson.version) * 1e8 + count * 10 + branchId;
} catch (e) {
throw new Error('Could not determine build number: ' + e.toString());
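
The localization flow added in this file chains the pieces shown above: vscode-translations-export builds and emits XLFs into ./vscode-translations-export, import-extensions-xlfs copies the allow-listed extension XLFs plus the ads-core XLFs into resources/xlf/en, and update-english-xlfs deletes the export folder afterwards. A sketch of the import step on its own, using the same gulp, gulp-filter, event-stream and vinyl-fs modules the gulpfile requires:

import * as es from 'event-stream';
import * as gulp from 'gulp';
import * as vfs from 'vinyl-fs';
const filter = require('gulp-filter');

function importExtensionXlfs(allowedXlfGlobs: string[]): NodeJS.ReadWriteStream {
	return es.merge(
		// Only the extension XLFs on the allow-list...
		gulp.src('./vscode-translations-export/vscode-extensions/*.xlf').pipe(filter(allowedXlfGlobs)),
		// ...plus everything exported for the ADS core.
		gulp.src('./vscode-translations-export/ads-core/*.xlf')
	).pipe(vfs.dest('./resources/xlf/en'));
}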

View File

@@ -17,7 +17,7 @@ import { Stream } from 'stream';
const mkdirp = require('mkdirp');
interface IExtensionDefinition {
export interface IExtensionDefinition {
name: string;
version: string;
repo: string;

View File

@@ -0,0 +1,79 @@
"use strict";
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
const got_1 = require("got");
const fs = require("fs");
const path = require("path");
const url = require("url");
const ansiColors = require("ansi-colors");
const root = path.dirname(path.dirname(__dirname));
const rootCG = path.join(root, 'extensionsCG');
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
const builtInExtensions = productjson.builtInExtensions;
const webBuiltInExtensions = productjson.webBuiltInExtensions;
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
const contentBasePath = 'raw.githubusercontent.com';
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
async function downloadExtensionDetails(extension) {
var _a, _b, _c;
const extensionLabel = `${extension.name}@${extension.version}`;
const repository = url.parse(extension.repo).path.substr(1);
const repositoryContentBaseUrl = `https://${token ? `${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`;
const promises = [];
for (const fileName of contentFileNames) {
promises.push(new Promise(resolve => {
(0, got_1.default)(`${repositoryContentBaseUrl}/${fileName}`)
.then(response => {
resolve({ fileName, body: response.rawBody });
})
.catch(error => {
if (error.response.statusCode === 404) {
resolve({ fileName, body: undefined });
}
else {
resolve({ fileName, body: null });
}
});
}));
}
console.log(extensionLabel);
const results = await Promise.all(promises);
for (const result of results) {
if (result.body) {
const extensionFolder = path.join(rootCG, extension.name);
fs.mkdirSync(extensionFolder, { recursive: true });
fs.writeFileSync(path.join(extensionFolder, result.fileName), result.body);
console.log(` - ${result.fileName} ${ansiColors.green('✔︎')}`);
}
else if (result.body === undefined) {
console.log(` - ${result.fileName} ${ansiColors.yellow('⚠️')}`);
}
else {
console.log(` - ${result.fileName} ${ansiColors.red('🛑')}`);
}
}
// Validation
if (!((_a = results.find(r => r.fileName === 'package.json')) === null || _a === void 0 ? void 0 : _a.body)) {
// throw new Error(`The "package.json" file could not be found for the built-in extension - ${extensionLabel}`);
}
if (!((_b = results.find(r => r.fileName === 'package-lock.json')) === null || _b === void 0 ? void 0 : _b.body) &&
!((_c = results.find(r => r.fileName === 'yarn.lock')) === null || _c === void 0 ? void 0 : _c.body)) {
// throw new Error(`The "package-lock.json"/"yarn.lock" could not be found for the built-in extension - ${extensionLabel}`);
}
}
async function main() {
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
await downloadExtensionDetails(extension);
}
}
main().then(() => {
console.log(`Built-in extensions component data downloaded ${ansiColors.green('✔︎')}`);
process.exit(0);
}, err => {
console.log(`Built-in extensions component data could not be downloaded ${ansiColors.red('🛑')}`);
console.error(err);
process.exit(1);
});

View File

@@ -0,0 +1,83 @@
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import got from 'got';
import * as fs from 'fs';
import * as path from 'path';
import * as url from 'url';
import ansiColors = require('ansi-colors');
import { IExtensionDefinition } from './builtInExtensions';
const root = path.dirname(path.dirname(__dirname));
const rootCG = path.join(root, 'extensionsCG');
const productjson = JSON.parse(fs.readFileSync(path.join(__dirname, '../../product.json'), 'utf8'));
const builtInExtensions = <IExtensionDefinition[]>productjson.builtInExtensions;
const webBuiltInExtensions = <IExtensionDefinition[]>productjson.webBuiltInExtensions;
const token = process.env['VSCODE_MIXIN_PASSWORD'] || process.env['GITHUB_TOKEN'] || undefined;
const contentBasePath = 'raw.githubusercontent.com';
const contentFileNames = ['package.json', 'package-lock.json', 'yarn.lock'];
async function downloadExtensionDetails(extension: IExtensionDefinition): Promise<void> {
const extensionLabel = `${extension.name}@${extension.version}`;
const repository = url.parse(extension.repo).path!.substr(1);
const repositoryContentBaseUrl = `https://${token ? `${token}@` : ''}${contentBasePath}/${repository}/v${extension.version}`;
const promises = [];
for (const fileName of contentFileNames) {
promises.push(new Promise<{ fileName: string, body: Buffer | undefined | null }>(resolve => {
got(`${repositoryContentBaseUrl}/${fileName}`)
.then(response => {
resolve({ fileName, body: response.rawBody });
})
.catch(error => {
if (error.response.statusCode === 404) {
resolve({ fileName, body: undefined });
} else {
resolve({ fileName, body: null });
}
});
}));
}
console.log(extensionLabel);
const results = await Promise.all(promises);
for (const result of results) {
if (result.body) {
const extensionFolder = path.join(rootCG, extension.name);
fs.mkdirSync(extensionFolder, { recursive: true });
fs.writeFileSync(path.join(extensionFolder, result.fileName), result.body);
console.log(` - ${result.fileName} ${ansiColors.green('✔︎')}`);
} else if (result.body === undefined) {
console.log(` - ${result.fileName} ${ansiColors.yellow('⚠️')}`);
} else {
console.log(` - ${result.fileName} ${ansiColors.red('🛑')}`);
}
}
// Validation
if (!results.find(r => r.fileName === 'package.json')?.body) {
// throw new Error(`The "package.json" file could not be found for the built-in extension - ${extensionLabel}`);
}
if (!results.find(r => r.fileName === 'package-lock.json')?.body &&
!results.find(r => r.fileName === 'yarn.lock')?.body) {
// throw new Error(`The "package-lock.json"/"yarn.lock" could not be found for the built-in extension - ${extensionLabel}`);
}
}
async function main(): Promise<void> {
for (const extension of [...builtInExtensions, ...webBuiltInExtensions]) {
await downloadExtensionDetails(extension);
}
}
main().then(() => {
console.log(`Built-in extensions component data downloaded ${ansiColors.green('✔︎')}`);
process.exit(0);
}, err => {
console.log(`Built-in extensions component data could not be downloaded ${ansiColors.red('🛑')}`);
console.error(err);
process.exit(1);
});
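
The new builtInExtensionsCG script above fetches package.json, package-lock.json and yarn.lock for every built-in extension from raw.githubusercontent.com and records three outcomes per file. A condensed sketch of its per-file logic with the same 'got' client: a 200 yields the body, a 404 yields undefined (the file simply is not in the repo), and any other failure yields null so it is reported as an error.

import got from 'got';

async function fetchComponentFile(repositoryContentBaseUrl: string, fileName: string): Promise<Buffer | undefined | null> {
	try {
		const response = await got(`${repositoryContentBaseUrl}/${fileName}`);
		return response.rawBody;
	} catch (error: any) {
		return error?.response?.statusCode === 404 ? undefined : null;
	}
}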

View File

@@ -17,7 +17,7 @@ const fancyLog = require("fancy-log");
const ansiColors = require("ansi-colors");
const os = require("os");
const watch = require('./watch');
const reporter = reporter_1.createReporter();
const reporter = (0, reporter_1.createReporter)();
function getTypeScriptCompilerOptions(src) {
const rootDir = path.join(__dirname, `../../${src}`);
let options = {};
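
The reporter_1.createReporter() to (0, reporter_1.createReporter)() change here, and the many similar rewrites in the other compiled files in this diff, appear to come from a newer TypeScript compiler's CommonJS output: the comma expression turns the call into an indirect call, so the imported module object is no longer passed as the receiver. An illustrative snippet (not repository code):

const greeter = {
	name: 'build',
	greet(msg: string): string {
		// Read the receiver defensively so the example works with or without one.
		return `${(this as { name?: string } | undefined)?.name ?? 'detached'}: ${msg}`;
	}
};
console.log(greeter.greet('hi'));      // "build: hi"    - method call, greeter is the receiver
console.log((0, greeter.greet)('hi')); // "detached: hi" - indirect call, the receiver is dropped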

View File

@@ -28,7 +28,7 @@ exports.config = {
version: util.getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
copyright: 'Copyright (C) 2021 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',

View File

@@ -32,7 +32,7 @@ export const config = {
version: util.getElectronVersion(),
productAppName: product.nameLong,
companyName: 'Microsoft Corporation',
copyright: 'Copyright (C) 2019 Microsoft. All rights reserved',
copyright: 'Copyright (C) 2021 Microsoft. All rights reserved',
darwinIcon: 'resources/darwin/code.icns',
darwinBundleIdentifier: product.darwinBundleIdentifier,
darwinApplicationCategoryType: 'public.app-category.developer-tools',

View File

@@ -21,7 +21,7 @@ module.exports = new class {
const configs = context.options;
for (const config of configs) {
if (minimatch(context.getFilename(), config.target)) {
return utils_1.createImportRuleListener((node, value) => this._checkImport(context, config, node, value));
return (0, utils_1.createImportRuleListener)((node, value) => this._checkImport(context, config, node, value));
}
}
return {};
@@ -29,7 +29,7 @@ module.exports = new class {
_checkImport(context, config, node, path) {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
path = (0, path_1.join)(context.getFilename(), path);
}
let restrictions;
if (typeof config.restrictions === 'string') {

View File

@@ -17,7 +17,7 @@ module.exports = new class {
};
}
create(context) {
const fileDirname = path_1.dirname(context.getFilename());
const fileDirname = (0, path_1.dirname)(context.getFilename());
const parts = fileDirname.split(/\\|\//);
const ruleArgs = context.options[0];
let config;
@@ -39,11 +39,11 @@ module.exports = new class {
// nothing
return {};
}
return utils_1.createImportRuleListener((node, path) => {
return (0, utils_1.createImportRuleListener)((node, path) => {
if (path[0] === '.') {
path = path_1.join(path_1.dirname(context.getFilename()), path);
path = (0, path_1.join)((0, path_1.dirname)(context.getFilename()), path);
}
const parts = path_1.dirname(path).split(/\\|\//);
const parts = (0, path_1.dirname)(path).split(/\\|\//);
for (let i = parts.length - 1; i >= 0; i--) {
const part = parts[i];
if (config.allowed.has(part)) {

View File

@@ -20,10 +20,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
|| /vs(\/|\\)editor(\/|\\)editor.api/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.main/.test(fileName)
|| /vs(\/|\\)editor(\/|\\)editor.worker/.test(fileName)) {
return utils_1.createImportRuleListener((node, path) => {
return (0, utils_1.createImportRuleListener)((node, path) => {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
path = (0, path_1.join)(context.getFilename(), path);
}
if (/vs(\/|\\)nls/.test(path)) {
context.report({

View File

@@ -21,10 +21,10 @@ module.exports = new class NoNlsInStandaloneEditorRule {
// the vs/editor folder is allowed to use the standalone editor
return {};
}
return utils_1.createImportRuleListener((node, path) => {
return (0, utils_1.createImportRuleListener)((node, path) => {
// resolve relative paths
if (path[0] === '.') {
path = path_1.join(context.getFilename(), path);
path = (0, path_1.join)(context.getFilename(), path);
}
if (/vs(\/|\\)editor(\/|\\)standalone(\/|\\)/.test(path)
|| /vs(\/|\\)editor(\/|\\)common(\/|\\)standalone(\/|\\)/.test(path)

View File

@@ -15,7 +15,7 @@ module.exports = new (_a = class TranslationRemind {
};
}
create(context) {
return utils_1.createImportRuleListener((node, path) => this._checkImport(context, node, path));
return (0, utils_1.createImportRuleListener)((node, path) => this._checkImport(context, node, path));
}
_checkImport(context, node, path) {
if (path !== TranslationRemind.NLS_MODULE) {
@@ -31,7 +31,7 @@ module.exports = new (_a = class TranslationRemind {
let resourceDefined = false;
let json;
try {
json = fs_1.readFileSync('./build/lib/i18n.resources.json', 'utf8');
json = (0, fs_1.readFileSync)('./build/lib/i18n.resources.json', 'utf8');
}
catch (e) {
console.error('[translation-remind rule]: File with resources to pull from Transifex was not found. Aborting translation resource check for newly defined workbench part/service.');

View File

@@ -4,7 +4,7 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.translatePackageJSON = exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = void 0;
exports.translatePackageJSON = exports.packageRebuildExtensionsStream = exports.cleanRebuildExtensions = exports.packageExternalExtensionsStream = exports.scanBuiltinExtensions = exports.packageMarketplaceExtensionsStream = exports.packageLocalExtensionsStream = exports.fromMarketplace = exports.fromLocalNormal = exports.fromLocal = void 0;
const es = require("event-stream");
const fs = require("fs");
const glob = require("glob");
@@ -71,6 +71,7 @@ function fromLocal(extensionPath, forWeb) {
}
return input;
}
exports.fromLocal = fromLocal;
function fromLocalWebpack(extensionPath, webpackConfigFileName) {
const result = es.through();
const packagedDependencies = [];
@@ -143,7 +144,7 @@ function fromLocalWebpack(extensionPath, webpackConfigFileName) {
console.error(packagedDependencies);
result.emit('error', err);
});
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath) {
const result = es.through();
@@ -161,8 +162,9 @@ function fromLocalNormal(extensionPath) {
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
return result.pipe((0, stats_1.createStatsStream)(path.basename(extensionPath)));
}
exports.fromLocalNormal = fromLocalNormal;
const baseHeaders = {
'X-Market-Client-Id': 'VSCode Build',
'User-Agent': 'VSCode Build',
@@ -212,8 +214,10 @@ const externalExtensions = [
'agent',
'arc',
'asde-deployment',
'azcli',
'azdata',
'azurehybridtoolkit',
'azuremonitor',
'cms',
'dacpac',
'import',

View File

@@ -54,7 +54,7 @@ function updateExtensionPackageJSON(input: Stream, update: (data: any) => any):
.pipe(packageJsonFilter.restore);
}
function fromLocal(extensionPath: string, forWeb: boolean): Stream {
export function fromLocal(extensionPath: string, forWeb: boolean): Stream { // {{SQL CARBON EDIT}} - Needed in locFunc
const webpackConfigFileName = forWeb ? 'extension-browser.webpack.config.js' : 'extension.webpack.config.js';
const isWebPacked = fs.existsSync(path.join(extensionPath, webpackConfigFileName));
@@ -171,7 +171,7 @@ function fromLocalWebpack(extensionPath: string, webpackConfigFileName: string):
return result.pipe(createStatsStream(path.basename(extensionPath)));
}
function fromLocalNormal(extensionPath: string): Stream {
export function fromLocalNormal(extensionPath: string): Stream { // {{SQL CARBON EDIT}} - Needed in locFunc
const result = es.through();
const vsce = require('vsce') as typeof import('vsce');
@@ -249,8 +249,10 @@ const externalExtensions = [
'agent',
'arc',
'asde-deployment',
'azcli',
'azdata',
'azurehybridtoolkit',
'azuremonitor',
'cms',
'dacpac',
'import',

View File

@@ -4,7 +4,7 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.pullI18nPackFiles = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.pullCoreAndExtensionsXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
exports.prepareIslFiles = exports.prepareI18nPackFiles = exports.pullI18nPackFiles = exports.i18nPackVersion = exports.createI18nFile = exports.prepareI18nFiles = exports.pullSetupXlfFiles = exports.pullCoreAndExtensionsXlfFiles = exports.findObsoleteResources = exports.pushXlfFiles = exports.createXlfFilesForIsl = exports.createXlfFilesForExtensions = exports.createXlfFilesForCoreBundle = exports.getResource = exports.processNlsFiles = exports.Limiter = exports.XLF = exports.Line = exports.externalExtensionsWithTranslations = exports.extraLanguages = exports.defaultLanguages = void 0;
const path = require("path");
const fs = require("fs");
const event_stream_1 = require("event-stream");
@@ -237,14 +237,14 @@ XLF.parse = function (xlfString) {
}
let val = unit.target[0];
if (typeof val !== 'string') {
val = val._;
// We allow empty source values so support them for translations as well.
val = val._ ? val._ : '';
}
if (key && val) {
messages[key] = decodeEntities(val);
}
else {
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
if (!key) {
reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${originalFilePath} is missing the ID attribute.`));
return;
}
messages[key] = decodeEntities(val);
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
}
@@ -463,7 +463,7 @@ function processCoreBundleFormat(fileHeader, languages, json, emitter) {
});
}
function processNlsFiles(opts) {
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
let fileName = path.basename(file.path);
if (fileName === 'nls.metadata.json') {
let json = null;
@@ -484,7 +484,7 @@ function processNlsFiles(opts) {
exports.processNlsFiles = processNlsFiles;
const editorProject = 'vscode-editor', workbenchProject = 'vscode-workbench', extensionsProject = 'vscode-extensions', setupProject = 'vscode-setup';
// {{SQL CARBON EDIT}}
const sqlopsProject = 'sqlops-core';
const adsProject = 'ads-core';
function getResource(sourceFile) {
let resource;
if (/^vs\/platform/.test(sourceFile)) {
@@ -515,13 +515,13 @@ function getResource(sourceFile) {
}
// {{SQL CARBON EDIT}}
else if (/^sql/.test(sourceFile)) {
return { name: 'sql', project: sqlopsProject };
return { name: 'sql', project: adsProject };
}
throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
}
exports.getResource = getResource;
function createXlfFilesForCoreBundle() {
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
const basename = path.basename(file.path);
if (basename === 'nls.metadata.json') {
if (file.isBuffer()) {
@@ -572,7 +572,7 @@ function createXlfFilesForExtensions() {
let counter = 0;
let folderStreamEnded = false;
let folderStreamEndEmitted = false;
return event_stream_1.through(function (extensionFolder) {
return (0, event_stream_1.through)(function (extensionFolder) {
const folderStream = this;
const stat = fs.statSync(extensionFolder.path);
if (!stat.isDirectory()) {
@@ -590,7 +590,7 @@ function createXlfFilesForExtensions() {
}
return _xlf;
}
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe(event_stream_1.through(function (file) {
gulp.src([`.build/extensions/${extensionName}/package.nls.json`, `.build/extensions/${extensionName}/**/nls.metadata.json`], { allowEmpty: true }).pipe((0, event_stream_1.through)(function (file) {
if (file.isBuffer()) {
const buffer = file.contents;
const basename = path.basename(file.path);
@@ -649,7 +649,7 @@ function createXlfFilesForExtensions() {
}
exports.createXlfFilesForExtensions = createXlfFilesForExtensions;
function createXlfFilesForIsl() {
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
let projectName, resourceFile;
if (path.basename(file.path) === 'Default.isl') {
projectName = setupProject;
@@ -703,7 +703,7 @@ exports.createXlfFilesForIsl = createXlfFilesForIsl;
function pushXlfFiles(apiHostname, username, password) {
let tryGetPromises = [];
let updateCreatePromises = [];
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
@@ -765,7 +765,7 @@ function getAllResources(project, apiHostname, username, password) {
function findObsoleteResources(apiHostname, username, password) {
let resourcesByProject = Object.create(null);
resourcesByProject[extensionsProject] = [].concat(exports.externalExtensionsWithTranslations); // clone
return event_stream_1.through(function (file) {
return (0, event_stream_1.through)(function (file) {
const project = path.dirname(file.relative);
const fileName = path.basename(file.path);
const slug = fileName.substr(0, fileName.length - '.xlf'.length);
@@ -942,7 +942,7 @@ function pullXlfFiles(apiHostname, username, password, language, resources) {
const credentials = `${username}:${password}`;
let expectedTranslationsCount = resources.length;
let translationsRetrieved = 0, called = false;
return event_stream_1.readable(function (_count, callback) {
return (0, event_stream_1.readable)(function (_count, callback) {
// Mark end of stream when all resources were retrieved
if (translationsRetrieved === expectedTranslationsCount) {
return this.emit('end');
@@ -1000,7 +1000,7 @@ function retrieveResource(language, resource, apiHostname, credentials) {
}
function prepareI18nFiles() {
let parsePromises = [];
return event_stream_1.through(function (xlf) {
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);
@@ -1038,7 +1038,8 @@ function createI18nFile(originalFilePath, messages) {
contents: Buffer.from(content, 'utf8')
});
}
const i18nPackVersion = '1.0.0';
exports.createI18nFile = createI18nFile;
exports.i18nPackVersion = '1.0.0'; // {{SQL CARBON EDIT}} Needed in locfunc.
function pullI18nPackFiles(apiHostname, username, password, language, resultingTranslationPaths) {
return pullCoreAndExtensionsXlfFiles(apiHostname, username, password, language, exports.externalExtensionsWithTranslations)
.pipe(prepareI18nPackFiles(exports.externalExtensionsWithTranslations, resultingTranslationPaths, language.id === 'ps'));
@@ -1046,10 +1047,10 @@ function pullI18nPackFiles(apiHostname, username, password, language, resultingT
exports.pullI18nPackFiles = pullI18nPackFiles;
function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pseudo = false) {
let parsePromises = [];
let mainPack = { version: i18nPackVersion, contents: {} };
let mainPack = { version: exports.i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
return event_stream_1.through(function (xlf) {
return (0, event_stream_1.through)(function (xlf) {
let project = path.basename(path.dirname(xlf.relative));
let resource = path.basename(xlf.relative, '.xlf');
let contents = xlf.contents.toString();
@@ -1062,7 +1063,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
if (project === extensionsProject) {
let extPack = extensionsPacks[resource];
if (!extPack) {
extPack = extensionsPacks[resource] = { version: i18nPackVersion, contents: {} };
extPack = extensionsPacks[resource] = { version: exports.i18nPackVersion, contents: {} };
}
const externalId = externalExtensions[resource];
if (!externalId) { // internal extension: remove 'extensions/extensionId/' segment
@@ -1110,7 +1111,7 @@ function prepareI18nPackFiles(externalExtensions, resultingTranslationPaths, pse
exports.prepareI18nPackFiles = prepareI18nPackFiles;
function prepareIslFiles(language, innoSetupConfig) {
let parsePromises = [];
return event_stream_1.through(function (xlf) {
return (0, event_stream_1.through)(function (xlf) {
let stream = this;
let parsePromise = XLF.parse(xlf.contents.toString());
parsePromises.push(parsePromise);

View File

@@ -30,10 +30,6 @@
"name": "vs/workbench/api/common",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/backup",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/bulkEdit",
"project": "vscode-workbench"
@@ -218,6 +214,10 @@
"name": "vs/workbench/contrib/webviewPanel",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/workspace",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/contrib/workspaces",
"project": "vscode-workbench"
@@ -254,6 +254,10 @@
"name": "vs/workbench/services/authToken",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/backup",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/bulkEdit",
"project": "vscode-workbench"
@@ -294,6 +298,10 @@
"name": "vs/workbench/services/files",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/history",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/log",
"project": "vscode-workbench"
@@ -389,6 +397,10 @@
{
"name": "vs/workbench/services/gettingStarted",
"project": "vscode-workbench"
},
{
"name": "vs/workbench/services/host",
"project": "vscode-workbench"
}
]
}

View File

@@ -64,7 +64,7 @@ export const externalExtensionsWithTranslations = {
};
interface Map<V> {
export interface Map<V> { // {{SQL CARBON EDIT}} Needed in locfunc.
[key: string]: V;
}
@@ -79,7 +79,7 @@ export interface Resource {
project: string;
}
interface ParsedXLF {
export interface ParsedXLF { // {{SQL CARBON EDIT}} Needed in locfunc.
messages: Map<string>;
originalFilePath: string;
language: string;
@@ -339,13 +339,14 @@ export class XLF {
let val = unit.target[0];
if (typeof val !== 'string') {
val = val._;
// We allow empty source values so support them for translations as well.
val = val._ ? val._ : '';
}
if (key && val) {
messages[key] = decodeEntities(val);
} else {
reject(new Error(`XLF parsing error: XLIFF file ${originalFilePath} does not contain full localization data. ID or target translation for one of the trans-unit nodes is not present.`));
if (!key) {
reject(new Error(`XLF parsing error: trans-unit ${JSON.stringify(unit, undefined, 0)} defined in file ${originalFilePath} is missing the ID attribute.`));
return;
}
messages[key] = decodeEntities(val);
});
files.push({ messages: messages, originalFilePath: originalFilePath, language: language.toLowerCase() });
}
@@ -610,7 +611,7 @@ const editorProject: string = 'vscode-editor',
setupProject: string = 'vscode-setup';
// {{SQL CARBON EDIT}}
const sqlopsProject: string = 'sqlops-core';
const adsProject: string = 'ads-core';
export function getResource(sourceFile: string): Resource {
let resource: string;
@@ -637,7 +638,7 @@ export function getResource(sourceFile: string): Resource {
// {{SQL CARBON EDIT}}
else if (/^sql/.test(sourceFile)) {
return { name: 'sql', project: sqlopsProject };
return { name: 'sql', project: adsProject };
}
throw new Error(`Could not identify the XLF bundle for ${sourceFile}`);
@@ -1166,7 +1167,7 @@ export function prepareI18nFiles(): ThroughStream {
});
}
function createI18nFile(originalFilePath: string, messages: any): File {
export function createI18nFile(originalFilePath: string, messages: any): File { // {{SQL CARBON EDIT}} Needed for locfunc.
let result = Object.create(null);
result[''] = [
'--------------------------------------------------------------------------------------------',
@@ -1189,14 +1190,14 @@ function createI18nFile(originalFilePath: string, messages: any): File {
});
}
interface I18nPack {
export interface I18nPack { // {{SQL CARBON EDIT}} Needed in locfunc.
version: string;
contents: {
[path: string]: Map<string>;
};
}
const i18nPackVersion = '1.0.0';
export const i18nPackVersion = '1.0.0'; // {{SQL CARBON EDIT}} Needed in locfunc.
export interface TranslationPath {
id: string;
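
The XLF.parse change above relaxes the old all-or-nothing check: an empty <target> no longer fails the whole file (it becomes an empty string), and only a missing ID is still a hard error. A sketch of that per-unit rule, where key and target stand for the values the parser pulls out of each trans-unit and decodeEntities is the helper already defined in i18n.ts:

function toMessage(key: string | undefined, target: string | { _?: string }, decodeEntities: (s: string) => string): [string, string] {
	if (!key) {
		// Mirrors the new error: only a missing ID aborts parsing.
		throw new Error('XLF parsing error: trans-unit is missing the ID attribute.');
	}
	// Empty translations are allowed and normalized to ''.
	const val = typeof target === 'string' ? target : (target._ ? target._ : '');
	return [key, decodeEntities(val)];
}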

View File

@@ -56,7 +56,9 @@ const CORE_TYPES = [
const NATIVE_TYPES = [
'NativeParsedArgs',
'INativeEnvironmentService',
'INativeWindowConfiguration'
'AbstractNativeEnvironmentService',
'INativeWindowConfiguration',
'ICommonNativeHostService'
];
const RULES = [
// Tests: skip
@@ -79,19 +81,9 @@ const RULES = [
'@types/node' // no node.js
]
},
// Common: vs/platform/environment/common/argv.ts
// Common: vs/platform/environment/common/*
{
target: '**/{vs,sql}/platform/environment/common/argv.ts',
disallowedTypes: [ /* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
]
},
// Common: vs/platform/environment/common/environment.ts
{
target: '**/{vs,sql}/platform/environment/common/environment.ts',
target: '**/{vs,sql}/platform/environment/common/*.ts',
disallowedTypes: [ /* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
@@ -109,6 +101,16 @@ const RULES = [
'@types/node' // no node.js
]
},
// Common: vs/platform/native/common/native.ts
{
target: '**/vs/platform/native/common/native.ts',
disallowedTypes: [ /* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts',
'@types/node' // no node.js
]
},
// Common: vs/workbench/api/common/extHostExtensionService.ts
{
target: '**/{vs,sql}/workbench/api/common/extHostExtensionService.ts',
@@ -197,7 +199,7 @@ const RULES = [
]
}
];
const TS_CONFIG_PATH = path_1.join(__dirname, '../../', 'src', 'tsconfig.json');
const TS_CONFIG_PATH = (0, path_1.join)(__dirname, '../../', 'src', 'tsconfig.json');
let hasErrors = false;
function checkFile(program, sourceFile, rule) {
checkNode(sourceFile);
@@ -248,8 +250,8 @@ function checkFile(program, sourceFile, rule) {
}
function createProgram(tsconfigPath) {
const tsConfig = ts.readConfigFile(tsconfigPath, ts.sys.readFile);
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => fs_1.readFileSync(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, path_1.resolve(path_1.dirname(tsconfigPath)), { noEmit: true });
const configHostParser = { fileExists: fs_1.existsSync, readDirectory: ts.sys.readDirectory, readFile: file => (0, fs_1.readFileSync)(file, 'utf8'), useCaseSensitiveFileNames: process.platform === 'linux' };
const tsConfigParsed = ts.parseJsonConfigFileContent(tsConfig.config, configHostParser, (0, path_1.resolve)((0, path_1.dirname)(tsconfigPath)), { noEmit: true });
const compilerHost = ts.createCompilerHost(tsConfigParsed.options, true);
return ts.createProgram(tsConfigParsed.fileNames, tsConfigParsed.options, compilerHost);
}
@@ -259,7 +261,7 @@ function createProgram(tsconfigPath) {
const program = createProgram(TS_CONFIG_PATH);
for (const sourceFile of program.getSourceFiles()) {
for (const rule of RULES) {
if (minimatch_1.match([sourceFile.fileName], rule.target).length > 0) {
if ((0, minimatch_1.match)([sourceFile.fileName], rule.target).length > 0) {
if (!rule.skip) {
checkFile(program, sourceFile, rule);
}
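
A small sketch of how the checker above dispatches its RULES: every source file name is matched against each rule's glob with minimatch, and matching rules that are not marked skip get checked. LayerRule here is a stand-in for the richer rule objects in the file.

import { match } from 'minimatch';

interface LayerRule {
	target: string;
	skip?: boolean;
}

function applicableRules(fileName: string, rules: LayerRule[]): LayerRule[] {
	// Same test as the loop above: minimatch the file against the rule's target glob.
	return rules.filter(rule => match([fileName], rule.target).length > 0 && !rule.skip);
}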

View File

@@ -58,7 +58,9 @@ const CORE_TYPES = [
const NATIVE_TYPES = [
'NativeParsedArgs',
'INativeEnvironmentService',
'INativeWindowConfiguration'
'AbstractNativeEnvironmentService',
'INativeWindowConfiguration',
'ICommonNativeHostService'
];
const RULES = [
@@ -86,20 +88,9 @@ const RULES = [
]
},
// Common: vs/platform/environment/common/argv.ts
// Common: vs/platform/environment/common/*
{
target: '**/{vs,sql}/platform/environment/common/argv.ts',
disallowedTypes: [/* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
]
},
// Common: vs/platform/environment/common/environment.ts
{
target: '**/{vs,sql}/platform/environment/common/environment.ts',
target: '**/{vs,sql}/platform/environment/common/*.ts',
disallowedTypes: [/* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
@@ -119,6 +110,17 @@ const RULES = [
]
},
// Common: vs/platform/native/common/native.ts
{
target: '**/vs/platform/native/common/native.ts',
disallowedTypes: [/* Ignore native types that are defined from here */],
allowedTypes: CORE_TYPES,
disallowedDefinitions: [
'lib.dom.d.ts', // no DOM
'@types/node' // no node.js
]
},
// Common: vs/workbench/api/common/extHostExtensionService.ts
{
target: '**/{vs,sql}/workbench/api/common/extHostExtensionService.ts',


@@ -4,14 +4,20 @@
* Licensed under the Source EULA. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
Object.defineProperty(exports, "__esModule", { value: true });
exports.packageLangpacksStream = void 0;
exports.renameVscodeLangpacks = exports.refreshLangpacks = exports.modifyI18nPackFiles = exports.packageSingleExtensionStream = exports.packageLangpacksStream = void 0;
const es = require("event-stream");
const path = require("path");
const fs = require("fs");
const stats_1 = require("./stats");
const File = require("vinyl");
const glob = require("glob");
const rename = require("gulp-rename");
const ext = require("./extensions");
//imports for langpack refresh.
const event_stream_1 = require("event-stream");
const i18n = require("./i18n");
const fs = require("fs");
const File = require("vinyl");
const rimraf = require("rimraf");
const gulp = require("gulp");
const vfs = require("vinyl-fs");
const root = path.dirname(path.dirname(__dirname));
// Modified packageLocalExtensionsStream from extensions.ts, but for langpacks.
function packageLangpacksStream() {
@@ -22,28 +28,353 @@ function packageLangpacksStream() {
return { name: langpackName, path: langpackPath };
});
const builtLangpacks = langpackDescriptions.map(langpack => {
return fromLocalNormal(langpack.path)
return ext.fromLocalNormal(langpack.path)
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
});
return es.merge(builtLangpacks);
}
exports.packageLangpacksStream = packageLangpacksStream;
//copied from extensions.
function fromLocalNormal(extensionPath) {
const result = es.through();
const vsce = require('vsce');
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath)
}));
es.readArray(files).pipe(result);
})
.catch(err => result.emit('error', err));
return result.pipe(stats_1.createStatsStream(path.basename(extensionPath)));
// Modified packageLocalExtensionsStream but for any ADS extensions including excluded/external ones.
function packageSingleExtensionStream(name) {
const extenalExtensionDescriptions = glob.sync(`extensions/${name}/package.json`)
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
});
const builtExtension = extenalExtensionDescriptions.map(extension => {
return ext.fromLocal(extension.path, false)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(builtExtension);
}
exports.packageSingleExtensionStream = packageSingleExtensionStream;
// Langpack creation functions go here.
/**
* Function combines the contents of the SQL core XLF file into the current main i18n file containing the vs core strings.
* Based on createI18nFile in i18n.ts
*/
function updateMainI18nFile(existingTranslationFilePath, originalFilePath, messages) {
let currFilePath = path.join(existingTranslationFilePath + '.i18n.json');
let currentContent = fs.readFileSync(currFilePath);
let currentContentObject = JSON.parse(currentContent.toString());
let objectContents = currentContentObject.contents;
let result = Object.create(null);
// Delete any SQL strings that are no longer part of ADS in current langpack.
for (let contentKey of Object.keys(objectContents)) {
if (contentKey.startsWith('sql') && messages.contents[contentKey] === undefined) {
delete objectContents[`${contentKey}`];
}
}
messages.contents = Object.assign(Object.assign({}, objectContents), messages.contents);
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
'Licensed under the Source EULA. See License.txt in the project root for license information.',
'--------------------------------------------------------------------------------------------',
'Do not edit this file. It is machine generated.'
];
for (let key of Object.keys(messages)) {
result[key] = messages[key];
}
let content = JSON.stringify(result, null, '\t');
if (process.platform === 'win32') {
content = content.replace(/\n/g, '\r\n');
}
return new File({
path: path.join(originalFilePath + '.i18n.json'),
contents: Buffer.from(content, 'utf8'),
});
}
/**
* Function handles processing xlf resources and turning them into i18n.json files.
* It collects the i18n files' translation paths so they can be added back into package.main.
* Based on prepareI18nPackFiles in i18n.ts
*/
function modifyI18nPackFiles(existingTranslationFolder, resultingTranslationPaths, pseudo = false) {
let parsePromises = [];
let mainPack = { version: i18n.i18nPackVersion, contents: {} };
let extensionsPacks = {};
let errors = [];
return (0, event_stream_1.through)(function (xlf) {
let rawResource = path.basename(xlf.relative, '.xlf');
let resource = rawResource.substring(0, rawResource.lastIndexOf('.'));
let contents = xlf.contents.toString();
let parsePromise = pseudo ? i18n.XLF.parsePseudo(contents) : i18n.XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(resolvedFiles => {
resolvedFiles.forEach(file => {
const path = file.originalFilePath;
const firstSlash = path.indexOf('/');
//exclude core sql file from extension processing.
if (resource !== 'sql') {
let extPack = extensionsPacks[resource];
if (!extPack) {
extPack = extensionsPacks[resource] = { version: i18n.i18nPackVersion, contents: {} };
}
//remove extensions/extensionId section as all extensions will be webpacked.
const secondSlash = path.indexOf('/', firstSlash + 1);
extPack.contents[path.substr(secondSlash + 1)] = file.messages;
}
else {
mainPack.contents[path.substr(firstSlash + 1)] = file.messages;
}
});
}).catch(reason => {
errors.push(reason);
});
}, function () {
Promise.all(parsePromises)
.then(() => {
if (errors.length > 0) {
throw errors;
}
const translatedMainFile = updateMainI18nFile(existingTranslationFolder + '\\main', './main', mainPack);
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = i18n.createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
//handle edge case for 'Microsoft.sqlservernotebook' where extension name is the same as extension ID.
//(Other extensions need to have publisher appended in front as their ID.)
const adsExtensionId = (extension === 'Microsoft.sqlservernotebook') ? extension : 'Microsoft.' + extension;
resultingTranslationPaths.push({ id: adsExtensionId, resourceName: `extensions/${extension}.i18n.json` });
}
this.queue(null);
})
.catch((reason) => {
this.emit('error', reason);
});
});
}
exports.modifyI18nPackFiles = modifyI18nPackFiles;
const textFields = {
"nameText": 'ads',
"displayNameText": 'Azure Data Studio',
"publisherText": 'Microsoft',
"licenseText": 'SEE SOURCE EULA LICENSE IN LICENSE.txt',
"updateText": 'cd ../vscode && npm run update-localization-extension ',
"vscodeVersion": '*',
"azdataPlaceholder": '^0.0.0',
"gitUrl": 'https://github.com/Microsoft/azuredatastudio'
};
//list of extensions from vscode that are to be included with ADS.
const VSCODEExtensions = [
"bat",
"configuration-editing",
"docker",
"extension-editing",
"git-ui",
"git",
"github-authentication",
"github",
"image-preview",
"json-language-features",
"json",
"markdown-basics",
"markdown-language-features",
"merge-conflict",
"microsoft-authentication",
"powershell",
"python",
"r",
"search-result",
"sql",
"theme-abyss",
"theme-defaults",
"theme-kimbie-dark",
"theme-monokai-dimmed",
"theme-monokai",
"theme-quietlight",
"theme-red",
"theme-seti",
"theme-solarized-dark",
"theme-solarized-light",
"theme-tomorrow-night-blue",
"typescript-basics",
"xml",
"yaml"
];
/**
* A heavily modified version of update-localization-extension that runs using local xlf resources; no arguments need to be passed in.
* It converts a renamed vscode langpack to an ADS one or updates the existing langpack to use current XLF resources.
* It runs this process on all langpacks currently in the ADS i18n folder.
* (Replace an individual ADS langpack folder with a corresponding vscode langpack folder renamed to "ads" instead of "vscode"
* in order to update vscode core strings and extensions for that langpack)
*
* It removes the resources of vscode that we do not support, and adds in new i18n json files created from the xlf files in the folder.
* It also merges in the sql core XLF strings with the langpack's existing core strings into a combined main i18n json file.
*
* After running this gulp task, for each language pack:
*
* 1. Remember to change the version of the langpacks to continue from the previous version of the ADS langpack.
*
* 2. Also change the azdata version to match the current ADS version number.
*
* 3. Update the changelog with the new version of the language pack.
*
* IMPORTANT: If you have run this gulp task on langpacks that originated from vscode, for each affected vscode langpack, you must
* replace the changelog and readme files with the ones from the previous ADS version of the langpack before doing the above steps.
*
* This is mainly for consistency with previous langpacks and to provide proper information to the user.
*/
function refreshLangpacks() {
let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];
for (let i = 0; i < supportedLocations.length; i++) {
let langId = supportedLocations[i].id;
if (langId === "zh-cn") {
langId = "zh-hans";
}
if (langId === "zh-tw") {
langId = "zh-hant";
}
let location = path.join('.', 'resources', 'xlf');
let locExtFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
try {
fs.statSync(locExtFolder);
}
catch (_a) {
console.log('Language is not included in ADS yet: ' + langId);
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
//processing extension fields; version and folder name must be changed manually.
packageJSON['name'] = packageJSON['name'].replace('vscode', textFields.nameText);
packageJSON['displayName'] = packageJSON['displayName'].replace('Visual Studio Code', textFields.displayNameText);
packageJSON['publisher'] = textFields.publisherText;
packageJSON['license'] = textFields.licenseText;
packageJSON['scripts']['update'] = textFields.updateText + langId;
packageJSON['engines']['vscode'] = textFields.vscodeVersion;
packageJSON['repository']['url'] = textFields.gitUrl;
packageJSON['engines']['azdata'] = textFields.azdataPlaceholder; // Remember to change this to the appropriate version at the end.
let contributes = packageJSON['contributes'];
if (!contributes) {
throw new Error('The extension must define a "localizations" contribution in the "package.json"');
}
let localizations = contributes['localizations'];
if (!localizations) {
throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
}
localizations.forEach(function (localization) {
if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
}
let languageId = localization.transifexId || localization.languageId;
let translationDataFolder = path.join(locExtFolder, 'translations');
if (languageId === "zh-cn") {
languageId = "zh-hans";
}
if (languageId === "zh-tw") {
languageId = "zh-hant";
}
console.log(`Importing translations for ${languageId} from '${location}' to '${translationDataFolder}' ...`);
let translationPaths = [];
gulp.src(path.join(location, languageId, '**', '*.xlf'))
.pipe(modifyI18nPackFiles(translationDataFolder, translationPaths, languageId === 'ps'))
.on('error', (error) => {
console.log(`Error occurred while importing translations:`);
translationPaths = undefined;
if (Array.isArray(error)) {
error.forEach(console.log);
}
else if (error) {
console.log(error);
}
else {
console.log('Unknown error');
}
})
.pipe(vfs.dest(translationDataFolder))
.on('end', function () {
if (translationPaths !== undefined) {
let nonExistantExtensions = [];
for (let curr of localization.translations) {
try {
if (curr.id === 'vscode.theme-seti') {
//handle edge case where 'theme-seti' has a different id.
curr.id = 'vscode.vscode-theme-seti';
}
fs.statSync(path.join(translationDataFolder, curr.path.replace('./translations', '')));
}
catch (_a) {
nonExistantExtensions.push(curr);
}
}
for (let nonExt of nonExistantExtensions) {
let index = localization.translations.indexOf(nonExt);
if (index > -1) {
localization.translations.splice(index, 1);
}
}
for (let tp of translationPaths) {
let finalPath = `./translations/${tp.resourceName}`;
let isFound = false;
for (let i = 0; i < localization.translations.length; i++) {
if (localization.translations[i].path === finalPath) {
localization.translations[i].id = tp.id;
isFound = true;
break;
}
}
if (!isFound) {
localization.translations.push({ id: tp.id, path: finalPath });
}
}
fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
}
});
});
}
console.log("Langpack Refresh Completed.");
return Promise.resolve();
}
exports.refreshLangpacks = refreshLangpacks;
/**
* Function for replacing ads language packs with vscode ones.
* For new languages, remember to add to i18n.extraLanguages so that it will be recognized by ADS.
*/
function renameVscodeLangpacks() {
let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];
for (let i = 0; i < supportedLocations.length; i++) {
let langId = supportedLocations[i].id;
if (langId === "zh-cn") {
langId = "zh-hans";
}
if (langId === "zh-tw") {
langId = "zh-hant";
}
let locADSFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
let locVSCODEFolder = path.join('.', 'i18n', `vscode-language-pack-${langId}`);
let translationDataFolder = path.join(locVSCODEFolder, 'translations');
let xlfFolder = path.join('.', 'resources', 'xlf');
try {
fs.statSync(locVSCODEFolder);
}
catch (_a) {
console.log('vscode pack is not in ADS yet: ' + langId);
continue;
}
// Delete extension files in vscode language pack that are not in ADS.
if (fs.existsSync(translationDataFolder)) {
let totalExtensions = fs.readdirSync(path.join(translationDataFolder, 'extensions'));
for (let extensionTag in totalExtensions) {
let extensionFileName = totalExtensions[extensionTag];
let xlfPath = path.join(xlfFolder, `${langId}`, extensionFileName.replace('.i18n.json', '.xlf'));
if (!(fs.existsSync(xlfPath) || VSCODEExtensions.indexOf(extensionFileName.replace('.i18n.json', '')) !== -1)) {
let filePath = path.join(translationDataFolder, 'extensions', extensionFileName);
rimraf.sync(filePath);
}
}
}
//Get list of md files in ADS langpack, to copy to vscode langpack prior to renaming.
let globArray = glob.sync(path.join(locADSFolder, '*.md'));
//Copy files to vscode langpack, then remove the ADS langpack, and finally rename the vscode langpack to match the ADS one.
globArray.forEach(element => {
fs.copyFileSync(element, path.join(locVSCODEFolder, path.parse(element).base));
});
rimraf.sync(locADSFolder);
fs.renameSync(locVSCODEFolder, locADSFolder);
}
console.log("Langpack Rename Completed.");
return Promise.resolve();
}
exports.renameVscodeLangpacks = renameVscodeLangpacks;


@@ -5,12 +5,17 @@
import * as es from 'event-stream';
import * as path from 'path';
import * as fs from 'fs';
import { createStatsStream } from './stats';
import * as File from 'vinyl';
import { Stream } from 'stream';
import * as glob from 'glob';
import rename = require('gulp-rename');
import ext = require('./extensions');
//imports for langpack refresh.
import { through, ThroughStream } from 'event-stream';
import i18n = require('./i18n')
import * as fs from 'fs';
import * as File from 'vinyl';
import * as rimraf from 'rimraf';
import * as gulp from 'gulp';
import * as vfs from 'vinyl-fs';
const root = path.dirname(path.dirname(__dirname));
@@ -24,33 +29,377 @@ export function packageLangpacksStream(): NodeJS.ReadWriteStream {
})
const builtLangpacks = langpackDescriptions.map(langpack => {
return fromLocalNormal(langpack.path)
return ext.fromLocalNormal(langpack.path)
.pipe(rename(p => p.dirname = `langpacks/${langpack.name}/${p.dirname}`));
});
return es.merge(builtLangpacks);
}
//copied from extensions.
function fromLocalNormal(extensionPath: string): Stream {
const result = es.through();
const vsce = require('vsce') as typeof import('vsce');
vsce.listFiles({ cwd: extensionPath, packageManager: vsce.PackageManager.Yarn })
.then(fileNames => {
const files = fileNames
.map(fileName => path.join(extensionPath, fileName))
.map(filePath => new File({
path: filePath,
stat: fs.statSync(filePath),
base: extensionPath,
contents: fs.createReadStream(filePath) as any
}));
es.readArray(files).pipe(result);
// Modified packageLocalExtensionsStream but for any ADS extensions including excluded/external ones.
export function packageSingleExtensionStream(name: string): NodeJS.ReadWriteStream {
const extenalExtensionDescriptions = (<string[]>glob.sync(`extensions/${name}/package.json`))
.map(manifestPath => {
const extensionPath = path.dirname(path.join(root, manifestPath));
const extensionName = path.basename(extensionPath);
return { name: extensionName, path: extensionPath };
})
.catch(err => result.emit('error', err));
return result.pipe(createStatsStream(path.basename(extensionPath)));
const builtExtension = extenalExtensionDescriptions.map(extension => {
return ext.fromLocal(extension.path, false)
.pipe(rename(p => p.dirname = `extensions/${extension.name}/${p.dirname}`));
});
return es.merge(builtExtension);
}
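
A minimal usage sketch of how the two packaging streams above might be consumed from a gulp task; the task names, the './lib/locFunc' import path, the '.build' destination and the 'admin-tool-ext-win' extension name are illustrative assumptions, not part of this change.

import * as gulp from 'gulp';
import * as vfs from 'vinyl-fs';
import { packageLangpacksStream, packageSingleExtensionStream } from './lib/locFunc';

// Package every langpack under i18n/ into .build/langpacks/<name>/.
gulp.task('package-langpacks', () => packageLangpacksStream().pipe(vfs.dest('.build')));

// Package a single ADS extension by folder name, including excluded/external ones.
gulp.task('package-single-extension', () => packageSingleExtensionStream('admin-tool-ext-win').pipe(vfs.dest('.build')));
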
// Langpack creation functions go here.
/**
* Function combines the contents of the SQL core XLF file into the current main i18n file containing the vs core strings.
* Based on createI18nFile in i18n.ts
*/
function updateMainI18nFile(existingTranslationFilePath: string, originalFilePath: string, messages: any): File {
let currFilePath = path.join(existingTranslationFilePath + '.i18n.json');
let currentContent = fs.readFileSync(currFilePath);
let currentContentObject = JSON.parse(currentContent.toString());
let objectContents = currentContentObject.contents;
let result = Object.create(null);
// Delete any SQL strings that are no longer part of ADS in current langpack.
for (let contentKey of Object.keys(objectContents)) {
if(contentKey.startsWith('sql') && messages.contents[contentKey] === undefined){
delete objectContents[`${contentKey}`]
}
}
messages.contents = { ...objectContents, ...messages.contents };
result[''] = [
'--------------------------------------------------------------------------------------------',
'Copyright (c) Microsoft Corporation. All rights reserved.',
'Licensed under the Source EULA. See License.txt in the project root for license information.',
'--------------------------------------------------------------------------------------------',
'Do not edit this file. It is machine generated.'
];
for (let key of Object.keys(messages)) {
result[key] = messages[key];
}
let content = JSON.stringify(result, null, '\t');
if (process.platform === 'win32') {
content = content.replace(/\n/g, '\r\n');
}
return new File({
path: path.join(originalFilePath + '.i18n.json'),
contents: Buffer.from(content, 'utf8'),
})
}
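
To make the merge behaviour of updateMainI18nFile concrete, a small worked sketch with invented keys and translation strings (assumptions for illustration only): stale 'sql*' entries are dropped, untouched vs entries survive, and entries present in the incoming SQL messages win.

// Hypothetical existing main.i18n.json contents and freshly parsed SQL core messages.
const objectContents = {
	'vs/base/common/errors': { loadError: 'translated vs string' },  // vs core entry: kept as-is
	'sql/workbench/old/removedFile': { oldKey: 'stale string' },     // no longer in the SQL XLF: deleted
	'sql/workbench/common/query': { runQuery: 'old translation' }    // also in the SQL XLF: overwritten
};
const messages = { contents: { 'sql/workbench/common/query': { runQuery: 'new translation' } } };
// After the delete loop and the spread merge, messages.contents is:
// {
//	'vs/base/common/errors': { loadError: 'translated vs string' },
//	'sql/workbench/common/query': { runQuery: 'new translation' }
// }
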
/**
* Function handles processing xlf resources and turning them into i18n.json files.
* It collects the i18n files' translation paths so they can be added back into package.main.
* Based on prepareI18nPackFiles in i18n.ts
*/
export function modifyI18nPackFiles(existingTranslationFolder: string, resultingTranslationPaths: i18n.TranslationPath[], pseudo = false): NodeJS.ReadWriteStream {
let parsePromises: Promise<i18n.ParsedXLF[]>[] = [];
let mainPack: i18n.I18nPack = { version: i18n.i18nPackVersion, contents: {} };
let extensionsPacks: i18n.Map<i18n.I18nPack> = {};
let errors: any[] = [];
return through(function (this: ThroughStream, xlf: File) {
let rawResource = path.basename(xlf.relative, '.xlf');
let resource = rawResource.substring(0, rawResource.lastIndexOf('.'));
let contents = xlf.contents.toString();
let parsePromise = pseudo ? i18n.XLF.parsePseudo(contents) : i18n.XLF.parse(contents);
parsePromises.push(parsePromise);
parsePromise.then(
resolvedFiles => {
resolvedFiles.forEach(file => {
const path = file.originalFilePath;
const firstSlash = path.indexOf('/');
//exclude core sql file from extension processing.
if (resource !== 'sql') {
let extPack = extensionsPacks[resource];
if (!extPack) {
extPack = extensionsPacks[resource] = { version: i18n.i18nPackVersion, contents: {} };
}
//remove extensions/extensionId section as all extensions will be webpacked.
const secondSlash = path.indexOf('/', firstSlash + 1);
extPack.contents[path.substr(secondSlash + 1)] = file.messages;
} else {
mainPack.contents[path.substr(firstSlash + 1)] = file.messages;
}
});
}
).catch(reason => {
errors.push(reason);
});
}, function () {
Promise.all(parsePromises)
.then(() => {
if (errors.length > 0) {
throw errors;
}
const translatedMainFile = updateMainI18nFile(existingTranslationFolder + '\\main', './main', mainPack);
this.queue(translatedMainFile);
for (let extension in extensionsPacks) {
const translatedExtFile = i18n.createI18nFile(`extensions/${extension}`, extensionsPacks[extension]);
this.queue(translatedExtFile);
//handle edge case for 'Microsoft.sqlservernotebook' where extension name is the same as extension ID.
//(Other extensions need to have publisher appended in front as their ID.)
const adsExtensionId = (extension === 'Microsoft.sqlservernotebook') ? extension : 'Microsoft.' + extension;
resultingTranslationPaths.push({ id: adsExtensionId, resourceName: `extensions/${extension}.i18n.json` });
}
this.queue(null);
})
.catch((reason) => {
this.emit('error', reason);
});
});
}
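
A condensed sketch of the stream's contract, mirroring how refreshLangpacks below drives it and relying on the imports at the top of this file; the 'de' language folder and the 'Microsoft.agent' entry are assumptions for illustration. XLF vinyl files go in, main.i18n.json plus one extensions/<name>.i18n.json per resource come out, and the matching translation entries are pushed into the array that was passed in.

const translationPaths: i18n.TranslationPath[] = [];
gulp.src(path.join('resources', 'xlf', 'de', '**', '*.xlf'))
	.pipe(modifyI18nPackFiles(path.join('i18n', 'ads-language-pack-de', 'translations'), translationPaths))
	.pipe(vfs.dest(path.join('i18n', 'ads-language-pack-de', 'translations')))
	.on('end', () => {
		// e.g. [{ id: 'Microsoft.agent', resourceName: 'extensions/agent.i18n.json' }, ...]
		console.log(translationPaths);
	});
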
const textFields = {
"nameText": 'ads',
"displayNameText": 'Azure Data Studio',
"publisherText": 'Microsoft',
"licenseText": 'SEE SOURCE EULA LICENSE IN LICENSE.txt',
"updateText": 'cd ../vscode && npm run update-localization-extension ',
"vscodeVersion": '*',
"azdataPlaceholder": '^0.0.0',
"gitUrl": 'https://github.com/Microsoft/azuredatastudio'
}
//list of extensions from vscode that are to be included with ADS.
const VSCODEExtensions = [
"bat",
"configuration-editing",
"docker",
"extension-editing",
"git-ui",
"git",
"github-authentication",
"github",
"image-preview",
"json-language-features",
"json",
"markdown-basics",
"markdown-language-features",
"merge-conflict",
"microsoft-authentication",
"powershell",
"python",
"r",
"search-result",
"sql",
"theme-abyss",
"theme-defaults",
"theme-kimbie-dark",
"theme-monokai-dimmed",
"theme-monokai",
"theme-quietlight",
"theme-red",
"theme-seti",
"theme-solarized-dark",
"theme-solarized-light",
"theme-tomorrow-night-blue",
"typescript-basics",
"xml",
"yaml"
];
/**
* A heavily modified version of update-localization-extension that runs using local xlf resources; no arguments need to be passed in.
* It converts a renamed vscode langpack to an ADS one or updates the existing langpack to use current XLF resources.
* It runs this process on all langpacks currently in the ADS i18n folder.
* (Replace an individual ADS langpack folder with a corresponding vscode langpack folder renamed to "ads" instead of "vscode"
* in order to update vscode core strings and extensions for that langpack)
*
* It removes the resources of vscode that we do not support, and adds in new i18n json files created from the xlf files in the folder.
* It also merges in the sql core XLF strings with the langpack's existing core strings into a combined main i18n json file.
*
* After running this gulp task, for each language pack:
*
* 1. Remember to change the version of the langpacks to continue from the previous version of the ADS langpack.
*
* 2. Also change the azdata version to match the current ADS version number.
*
* 3. Update the changelog with the new version of the language pack.
*
* IMPORTANT: If you have run this gulp task on langpacks that originated from vscode, for each affected vscode langpack, you must
* replace the changelog and readme files with the ones from the previous ADS version of the langpack before doing the above steps.
*
* This is mainly for consistency with previous langpacks and to provide proper information to the user.
*/
export function refreshLangpacks(): Promise<void> {
let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];
for (let i = 0; i < supportedLocations.length; i++) {
let langId = supportedLocations[i].id;
if (langId === "zh-cn") {
langId = "zh-hans";
}
if (langId === "zh-tw") {
langId = "zh-hant";
}
let location = path.join('.', 'resources', 'xlf');
let locExtFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
try {
fs.statSync(locExtFolder);
}
catch {
console.log('Language is not included in ADS yet: ' + langId);
continue;
}
let packageJSON = JSON.parse(fs.readFileSync(path.join(locExtFolder, 'package.json')).toString());
//processing extension fields; version and folder name must be changed manually.
packageJSON['name'] = packageJSON['name'].replace('vscode', textFields.nameText);
packageJSON['displayName'] = packageJSON['displayName'].replace('Visual Studio Code', textFields.displayNameText);
packageJSON['publisher'] = textFields.publisherText;
packageJSON['license'] = textFields.licenseText;
packageJSON['scripts']['update'] = textFields.updateText + langId;
packageJSON['engines']['vscode'] = textFields.vscodeVersion;
packageJSON['repository']['url'] = textFields.gitUrl
packageJSON['engines']['azdata'] = textFields.azdataPlaceholder // Remember to change this to the appropriate version at the end.
let contributes = packageJSON['contributes'];
if (!contributes) {
throw new Error('The extension must define a "localizations" contribution in the "package.json"');
}
let localizations = contributes['localizations'];
if (!localizations) {
throw new Error('The extension must define a "localizations" contribution of type array in the "package.json"');
}
localizations.forEach(function (localization: any) {
if (!localization.languageId || !localization.languageName || !localization.localizedLanguageName) {
throw new Error('Each localization contribution must define "languageId", "languageName" and "localizedLanguageName" properties.');
}
let languageId = localization.transifexId || localization.languageId;
let translationDataFolder = path.join(locExtFolder, 'translations');
if (languageId === "zh-cn") {
languageId = "zh-hans";
}
if (languageId === "zh-tw") {
languageId = "zh-hant";
}
console.log(`Importing translations for ${languageId} from '${location}' to '${translationDataFolder}' ...`);
let translationPaths: any = [];
gulp.src(path.join(location, languageId, '**', '*.xlf'))
.pipe(modifyI18nPackFiles(translationDataFolder, translationPaths, languageId === 'ps'))
.on('error', (error: any) => {
console.log(`Error occurred while importing translations:`);
translationPaths = undefined;
if (Array.isArray(error)) {
error.forEach(console.log);
} else if (error) {
console.log(error);
} else {
console.log('Unknown error');
}
})
.pipe(vfs.dest(translationDataFolder))
.on('end', function () {
if (translationPaths !== undefined) {
let nonExistantExtensions = [];
for (let curr of localization.translations) {
try {
if (curr.id === 'vscode.theme-seti') {
//handle edge case where 'theme-seti' has a different id.
curr.id = 'vscode.vscode-theme-seti';
}
fs.statSync(path.join(translationDataFolder, curr.path.replace('./translations', '')));
}
catch {
nonExistantExtensions.push(curr);
}
}
for (let nonExt of nonExistantExtensions) {
let index = localization.translations.indexOf(nonExt);
if (index > -1) {
localization.translations.splice(index, 1);
}
}
for (let tp of translationPaths) {
let finalPath = `./translations/${tp.resourceName}`;
let isFound = false;
for (let i = 0; i < localization.translations.length; i++) {
if (localization.translations[i].path === finalPath) {
localization.translations[i].id = tp.id;
isFound = true;
break;
}
}
if (!isFound) {
localization.translations.push({ id: tp.id, path: finalPath });
}
}
fs.writeFileSync(path.join(locExtFolder, 'package.json'), JSON.stringify(packageJSON, null, '\t'));
}
});
});
}
console.log("Langpack Refresh Completed.");
return Promise.resolve();
}
/**
* Function for replacing ads language packs with vscode ones.
* For new languages, remember to add to i18n.extraLanguages so that it will be recognized by ADS.
*/
export function renameVscodeLangpacks(): Promise<void> {
let supportedLocations = [...i18n.defaultLanguages, ...i18n.extraLanguages];
for (let i = 0; i < supportedLocations.length; i++) {
let langId = supportedLocations[i].id;
if (langId === "zh-cn") {
langId = "zh-hans";
}
if (langId === "zh-tw") {
langId = "zh-hant";
}
let locADSFolder = path.join('.', 'i18n', `ads-language-pack-${langId}`);
let locVSCODEFolder = path.join('.', 'i18n', `vscode-language-pack-${langId}`);
let translationDataFolder = path.join(locVSCODEFolder, 'translations');
let xlfFolder = path.join('.', 'resources', 'xlf');
try {
fs.statSync(locVSCODEFolder);
}
catch {
console.log('vscode pack is not in ADS yet: ' + langId);
continue;
}
// Delete extension files in vscode language pack that are not in ADS.
if (fs.existsSync(translationDataFolder)) {
let totalExtensions = fs.readdirSync(path.join(translationDataFolder, 'extensions'));
for (let extensionTag in totalExtensions) {
let extensionFileName = totalExtensions[extensionTag];
let xlfPath = path.join(xlfFolder, `${langId}`, extensionFileName.replace('.i18n.json', '.xlf'))
if (!(fs.existsSync(xlfPath) || VSCODEExtensions.indexOf(extensionFileName.replace('.i18n.json', '')) !== -1)) {
let filePath = path.join(translationDataFolder, 'extensions', extensionFileName);
rimraf.sync(filePath);
}
}
}
//Get list of md files in ADS langpack, to copy to vscode langpack prior to renaming.
let globArray = glob.sync(path.join(locADSFolder, '*.md'));
//Copy files to vscode langpack, then remove the ADS langpack, and finally rename the vscode langpack to match the ADS one.
globArray.forEach(element => {
fs.copyFileSync(element, path.join(locVSCODEFolder,path.parse(element).base));
});
rimraf.sync(locADSFolder);
fs.renameSync(locVSCODEFolder, locADSFolder);
}
console.log("Langpack Rename Completed.");
return Promise.resolve();
}
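
Both exported maintenance flows are plain promise-returning functions, so a gulpfile could register them directly, as in the sketch below; the task names and the './lib/locFunc' path are assumptions, not names introduced by this change.

import * as gulp from 'gulp';
import { refreshLangpacks, renameVscodeLangpacks } from './lib/locFunc';

// First swap in renamed vscode langpack folders, then re-import the current XLF resources.
gulp.task('rename-vscode-langpacks', () => renameVscodeLangpacks());
gulp.task('refresh-langpacks', () => refreshLangpacks());
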


@@ -53,8 +53,8 @@ define([], [${wrap + lines.map(l => indent + l).join(',\n') + wrap}]);`;
* Returns a stream containing the patched JavaScript and source maps.
*/
function nls() {
const input = event_stream_1.through();
const output = input.pipe(event_stream_1.through(function (f) {
const input = (0, event_stream_1.through)();
const output = input.pipe((0, event_stream_1.through)(function (f) {
if (!f.sourceMap) {
return this.emit('error', new Error(`File ${f.relative} does not have sourcemaps.`));
}
@@ -72,7 +72,7 @@ function nls() {
}
_nls.patchFiles(f, typescript).forEach(f => this.emit('data', f));
}));
return event_stream_1.duplex(input, output);
return (0, event_stream_1.duplex)(input, output);
}
exports.nls = nls;
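
A sketch of where nls() might sit in a build pipeline, assuming a generic gulp-typescript/gulp-sourcemaps setup rather than this repo's own compilation helpers; the task name, import path, tsconfig path and 'out' destination are illustrative.

import * as gulp from 'gulp';
import * as sourcemaps from 'gulp-sourcemaps';
import * as gulpTs from 'gulp-typescript';
import { nls } from './lib/nls';

const tsProject = gulpTs.createProject('src/tsconfig.json');
gulp.task('compile-with-nls', () =>
	tsProject.src()
		.pipe(sourcemaps.init())
		.pipe(tsProject()).js               // compile with source maps carried on each file
		.pipe(nls())                        // patch localize() calls; requires file.sourceMap, hence gulp-sourcemaps
		.pipe(sourcemaps.write('.'))
		.pipe(gulp.dest('out')));
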
function isImportNode(ts, node) {


@@ -98,7 +98,7 @@ function toConcatStream(src, bundledFileHeader, sources, dest, fileContentMapper
return es.readArray(treatedSources)
.pipe(useSourcemaps ? util.loadSourcemaps() : es.through())
.pipe(concat(dest))
.pipe(stats_1.createStatsStream(dest));
.pipe((0, stats_1.createStatsStream)(dest));
}
function toBundleStream(src, bundledFileHeader, bundles, fileContentMapper) {
return es.merge(bundles.map(function (bundle) {
@@ -155,7 +155,7 @@ function optimizeTask(opts) {
addComment: true,
includeContent: true
}))
.pipe(opts.languages && opts.languages.length ? i18n_1.processNlsFiles({
.pipe(opts.languages && opts.languages.length ? (0, i18n_1.processNlsFiles)({
fileHeader: bundledFileHeader,
languages: opts.languages
}) : es.through())


@@ -12,7 +12,7 @@ const yarn = process.platform === 'win32' ? 'yarn.cmd' : 'yarn';
const rootDir = path.resolve(__dirname, '..', '..');
function runProcess(command, args = []) {
return new Promise((resolve, reject) => {
const child = child_process_1.spawn(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
const child = (0, child_process_1.spawn)(command, args, { cwd: rootDir, stdio: 'inherit', env: process.env });
child.on('exit', err => !err ? resolve() : process.exit(err !== null && err !== void 0 ? err : 1));
child.on('error', reject);
});
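
For orientation, a sketch of how the helper might be awaited from the same script; the yarn arguments are an assumption for illustration only.

// Hypothetical caller next to the yarn/runProcess definitions above.
async function installDependencies() {
	await runProcess(yarn, ['install', '--frozen-lockfile']); // example arguments only
}
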

Some files were not shown because too many files have changed in this diff.